h264: split current slice references into a separate structure
[nihav.git] / nihav-itu / src / codecs / h264 / types.rs
CommitLineData
22de733b 1use nihav_core::frame::NASimpleVideoFrame;
696e4e20
KS
2use nihav_codec_support::codecs::{MV, ZERO_MV};
3use nihav_codec_support::data::GenericCache;
56a17e69 4use super::SliceRefs;
15845d1a 5use super::pic_ref::FrameMBInfo;
696e4e20
KS
6
/// Prediction direction used by a partition of a B macroblock.
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    /// Prediction from reference list 0 only.
    L0,
    /// Prediction from reference list 1 only.
    L1,
    /// Bidirectional prediction using both reference lists.
    Bi,
}
14
/// Fully parsed macroblock type, including per-partition prediction modes.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    // intra types
    Intra4x4,
    Intra8x8,
    // NOTE(review): the three payload bytes look like (pred mode, chroma CBP,
    // luma CBP) — confirm against the slice parser; they are dropped by the
    // CompactMBType conversion below.
    Intra16x16(u8, u8, u8),
    PCM,

    // P (single-list inter) types, by partition layout
    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    // B (two-list inter) types; BMode payloads give per-partition direction
    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}
36
37impl MBType {
38 pub fn is_intra(self) -> bool {
42005e25 39 matches!(self, MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM)
696e4e20
KS
40 }
41 pub fn is_intra16x16(self) -> bool {
42005e25 42 matches!(self, MBType::Intra16x16(_, _, _))
696e4e20
KS
43 }
44 pub fn is_skip(self) -> bool {
42005e25 45 matches!(self, MBType::PSkip | MBType::BSkip)
696e4e20
KS
46 }
47 pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
48 pub fn is_l0(self, part: usize) -> bool {
49 match self {
50 MBType::B16x16(mode) => mode == BMode::L0,
51 MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
52 if part == 0 {
53 mode0 == BMode::L0
54 } else {
55 mode1 == BMode::L0
56 }
57 },
58 MBType::Direct | MBType::BSkip => false,
59 _ => true,
60 }
61 }
62 pub fn is_l1(self, part: usize) -> bool {
63 match self {
64 MBType::B16x16(mode) => mode == BMode::L1,
65 MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
66 if part == 0 {
67 mode0 == BMode::L1
68 } else {
69 mode1 == BMode::L1
70 }
71 },
72 _ => false,
73 }
74 }
75 pub fn num_parts(self) -> usize {
76 match self {
77 MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
78 MBType::PSkip |
79 MBType::Direct | MBType::BSkip
80 => 1,
81 MBType::P16x16 |
82 MBType::B16x16(_)
83 => 1,
84 MBType::P16x8 | MBType::P8x16 |
85 MBType::B16x8(_, _) | MBType::B8x16(_, _)
86 => 2,
87 _ => 4,
88 }
89 }
90 pub fn size(self) -> (usize, usize) {
91 match self {
92 MBType::Intra4x4 |
93 MBType::Intra8x8 |
94 MBType::Intra16x16(_, _, _) |
95 MBType::PCM |
96 MBType::P16x16 |
97 MBType::PSkip |
98 MBType::Direct |
99 MBType::B16x16(_) |
100 MBType::BSkip
101 => (16, 16),
102 MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
103 MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
104 _ => (8, 8),
105 }
106 }
107}
108
impl Default for MBType {
    /// Intra4x4 serves as the neutral initial value.
    fn default() -> Self { MBType::Intra4x4 }
}
112
/// Partitioning and prediction type of one 8x8 sub-macroblock.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    // P sub-partitions, by layout
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    /// 8x8 block whose motion is derived in direct mode.
    Direct8x8,
    // B sub-partitions carry their prediction direction
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}
125
126impl SubMBType {
127 pub fn num_parts(self) -> usize {
128 match self {
129 SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
130 SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
131 _ => 2,
132 }
133 }
134 pub fn size(self) -> (usize, usize) {
135 match self {
136 SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
137 SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
138 SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
139 SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
140 }
141 }
142 pub fn is_l0(self) -> bool {
143 match self {
144 SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
145 SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
146 mode == BMode::L0
147 },
148 _ => true,
149 }
150 }
151 pub fn is_l1(self) -> bool {
152 match self {
153 SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
154 SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
155 mode == BMode::L1
156 },
157 _ => false,
158 }
159 }
160}
161
impl Default for SubMBType {
    /// Direct8x8 serves as the neutral initial value.
    fn default() -> Self { SubMBType::Direct8x8 }
}
165
/// Payload-free version of `MBType`, compact enough to be stored per
/// macroblock for neighbour lookups (see `MBData`).
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    /// Placeholder for unset data; the `Default` value.
    None,
}
190
191impl CompactMBType {
192 pub fn is_intra(self) -> bool {
42005e25 193 matches!(self, CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16)
696e4e20
KS
194 }
195 pub fn is_intra16orpcm(self) -> bool {
42005e25 196 matches!(self, CompactMBType::Intra16x16 | CompactMBType::PCM)
696e4e20
KS
197 }
198 pub fn is_skip(self) -> bool {
42005e25 199 matches!(self, CompactMBType::PSkip | CompactMBType::BSkip)
696e4e20
KS
200 }
201 pub fn is_direct(self) -> bool {
42005e25 202 matches!(self, CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None)
696e4e20
KS
203 }
204 pub fn is_inter(self) -> bool {
205 !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
206 }
495b7ec0 207 pub fn is_16x16_ref(self) -> bool {
42005e25 208 matches!(self,
495b7ec0
KS
209 CompactMBType::Intra4x4 |
210 CompactMBType::Intra8x8 |
211 CompactMBType::Intra16x16 |
212 CompactMBType::PCM |
213 CompactMBType::P16x16 |
42005e25 214 CompactMBType::B16x16)
696e4e20
KS
215 }
216}
217
impl Default for CompactMBType {
    /// `None` marks entries that have not been filled in yet.
    fn default() -> Self { CompactMBType::None }
}
221
impl From<MBType> for CompactMBType {
    /// Strips the per-partition payloads from a full macroblock type.
    fn from(mbtype: MBType) -> Self {
        match mbtype {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
        }
    }
}
244
/// Spatial intra prediction mode, numbered as in the bitstream
/// (see the `From<u8>` conversion below); `None` marks an unset entry.
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}
259
260impl IntraPredMode {
261 pub fn is_none(self) -> bool { self == IntraPredMode::None }
262 pub fn into_pred_idx(self) -> i8 {
263 if !self.is_none() {
264 self as u8 as i8
265 } else {
266 -1
267 }
268 }
269}
270
impl Default for IntraPredMode {
    /// `None` marks entries that have not been filled in yet.
    fn default() -> Self { IntraPredMode::None }
}
274
impl From<u8> for IntraPredMode {
    /// Converts a decoded mode number into the enum;
    /// out-of-range values map to `None`.
    fn from(val: u8) -> Self {
        match val {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
        }
    }
}
291
42005e25
KS
impl From<IntraPredMode> for u8 {
    /// Inverse of the `From<u8>` conversion above;
    /// `None` maps to 9, the first invalid mode number.
    fn from(val: IntraPredMode) -> Self {
        match val {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
            _ => 9,
        }
    }
}
308
/// Sentinel picture order count marking a missing reference picture.
pub const MISSING_POC: u16 = 0xFFFF;
310
/// Compact reference picture index.
///
/// The low bits store the position in the reference list, bit 6
/// (`DIRECT_FLAG`) marks indices obtained through direct prediction, and
/// 0xFF/0xFE are reserved for the missing/invalid sentinels below.
#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8
}
315
/// No reference present — NOTE(review): appears to mean "neighbour not
/// available" in the prediction code; confirm against callers.
pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
/// Reference slot present but unusable (distinct from missing).
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
/// First entry of a reference list.
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
/// Bit flagging a reference produced by direct prediction.
const DIRECT_FLAG: u8 = 0x40;
320
321impl PicRef {
322 pub fn new(ref_idx: u8) -> Self {
323 Self { ref_idx }
324 }
325 pub fn not_avail(self) -> bool {
326 self == MISSING_REF || self == INVALID_REF
327 }
328 pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
329 pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
330 pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
331 fn min_pos(self, other: Self) -> Self {
332 match (self.not_avail(), other.not_avail()) {
333 (true, true) => self,
334 (false, true) => self,
335 (true, false) => other,
336 (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
337 }
338 }
339}
340
impl Default for PicRef {
    /// References start out missing until explicitly filled in.
    fn default() -> Self { MISSING_REF }
}
344
impl PartialEq for PicRef {
    /// Equality deliberately ignores the direct-prediction flag:
    /// both sides are compared with `DIRECT_FLAG` forced on, so a direct
    /// and a non-direct reference to the same picture compare equal.
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}
350
impl std::fmt::Display for PicRef {
    /// Prints "-1" for a missing reference, "-2" for an invalid one,
    /// otherwise the index without the direct flag.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
            write!(f, "-1")
        } else if *self == INVALID_REF {
            write!(f, "-2")
        } else {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
        }
    }
}
362
/// Per-macroblock data kept for neighbour prediction and deblocking.
#[derive(Clone,Copy,Default)]
pub struct MBData {
    // compact macroblock type (payloads stripped)
    pub mb_type: CompactMBType,
    // coded block pattern
    pub cbp: u8,
    // per-block coded flags — NOTE(review): presumably one bit per 4x4
    // block; confirm against the coefficient decoder
    pub coded_flags: u32,
    // NOTE(review): looks like the chroma intra prediction mode — confirm
    pub cmode: u8,
    // quantisers for luma and both chroma planes
    pub qp_y: u8,
    pub qp_u: u8,
    pub qp_v: u8,
    // whether the 8x8 transform is used (affects deblocking edge selection)
    pub transform_8x8: bool,
}
374
/// Maps a 4x4 block index (raster order, 0..15) to the index (0..3) of the
/// 8x8 block containing it. Only bits 1 and 3 of the index matter, which
/// matches the original table lookup with its `blk4 & 0xF` masking.
pub fn blk4_to_blk8(blk4: usize) -> usize {
    // bit 1 selects the right 8x8 column, bit 3 the bottom 8x8 row
    ((blk4 >> 1) & 1) | ((blk4 >> 2) & 2)
}
379
/// Per-8x8-block data: reference indices for both lists and the
/// coded-coefficient counts for the two chroma planes.
#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
}
385
impl Default for Blk8Data {
    /// Blocks start with missing references and no coded chroma coefficients.
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}
394
/// Per-4x4-block data used for prediction and deblocking decisions.
#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    // number of coded coefficients in this block
    pub ncoded: u8,
    // intra prediction mode (None when not intra-coded)
    pub ipred: IntraPredMode,
    // motion vectors for lists 0 and 1
    pub mv: [MV; 2],
    // clamped absolute MV differences, kept for CABAC context selection
    // (see `fill_mvd` / `get_mv_ctx`)
    pub mvd: [MV; 2],
}
402
/// Decoding state for the slice currently being processed.
pub struct SliceState {
    /// Current macroblock coordinates (in macroblock units).
    pub mb_x: usize,
    pub mb_y: usize,
    /// Picture dimensions in macroblocks.
    pub mb_w: usize,
    pub mb_h: usize,
    /// Address of the first macroblock of this slice.
    pub mb_start: usize,

    /// Sliding caches over macroblock / 8x8 / 4x4 block data, keeping
    /// enough rows for top/left neighbour access.
    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

    /// Deblocking strengths for the sixteen 4x4 blocks of the current
    /// macroblock: the high nibble describes the top edge, the low nibble
    /// the left edge (values are assigned in `fill_deblock`).
    pub deblock: [u8; 16],

    /// Whether the top/left neighbour macroblocks are available.
    pub has_top: bool,
    pub has_left: bool,

    /// Reconstructed border pixels kept for intra prediction.
    pub top_line_y: Vec<u8>,
    pub left_y: [u8; 17], // first element is top-left
    pub top_line_c: [Vec<u8>; 2],
    pub left_c: [[u8; 9]; 2],
}
424
d85f94f7
KS
/// Maps a 4x4 block index to the corner 4x4 block (0, 3, 12 or 15) of its
/// containing 8x8 block; used when direct 8x8 inference is enabled.
const BLK4_TO_D8: [usize; 16] = [ 0, 0, 3, 3, 0, 0, 3, 3, 12, 12, 15, 15, 12, 12, 15, 15 ];
426
696e4e20
KS
impl SliceState {
    /// Creates an empty state; `reset` must be called before use.
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: [0; 16],

            has_top: false,
            has_left: false,

            top_line_y: Vec::new(),
            left_y: [0; 17],
            top_line_c: [Vec::new(), Vec::new()],
            left_c: [[0; 9]; 2],
        }
    }
    /// Reinitialises the state for a slice starting at macroblock `mb_pos`
    /// in a picture of `mb_w` x `mb_h` macroblocks.
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_w = mb_w;
        self.mb_h = mb_h;
        self.mb_start = mb_pos;
        if mb_w > 0 {
            self.mb_x = mb_pos % mb_w;
            self.mb_y = mb_pos / mb_w;
        } else {
            self.mb_x = 0;
            self.mb_y = 0;
        }
        // caches keep one extra column on each side for neighbour access
        self.mb   = GenericCache::new(1, mb_w     + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());

        self.has_top = false;
        self.has_left = false;

        // border pixels default to 0x80 (mid-grey) for unavailable samples
        self.top_line_y.resize(mb_w * 16 + 1, 0x80);
        self.top_line_c[0].resize(mb_w * 8 + 1, 0x80);
        self.top_line_c[1].resize(mb_w * 8 + 1, 0x80);
        self.left_y = [0x80; 17];
        self.left_c = [[0x80; 9]; 2];
    }
    /// Saves the bottom row and right column of the just-reconstructed
    /// macroblock so the next macroblocks can use them for intra prediction.
    pub fn save_ipred_context(&mut self, frm: &NASimpleVideoFrame<u8>) {
        let dstoff = self.mb_x * 16;
        let srcoff = frm.offset[0] + self.mb_x * 16 + self.mb_y * 16 * frm.stride[0];
        // old top-line pixel becomes the new top-left corner sample
        self.left_y[0] = self.top_line_y[dstoff + 15];
        self.top_line_y[dstoff..][..16].copy_from_slice(&frm.data[srcoff + frm.stride[0] * 15..][..16]);
        for (dst, src) in self.left_y[1..].iter_mut().zip(frm.data[srcoff..].chunks(frm.stride[0])) {
            *dst = src[15];
        }
        for chroma in 0..2 {
            let cstride = frm.stride[chroma + 1];
            let dstoff = self.mb_x * 8;
            let srcoff = frm.offset[chroma + 1] + self.mb_x * 8 + self.mb_y * 8 * cstride;
            self.left_c[chroma][0] = self.top_line_c[chroma][dstoff + 7];
            self.top_line_c[chroma][dstoff..][..8].copy_from_slice(&frm.data[srcoff + cstride * 7..][..8]);
            for (dst, src) in self.left_c[chroma][1..].iter_mut().zip(frm.data[srcoff..].chunks(cstride)) {
                *dst = src[7];
            }
        }
    }
    /// Computes the deblocking strengths for the current macroblock.
    ///
    /// For each 4x4 block the top-edge strength goes into the high nibble
    /// of `self.deblock` (0x40 strongest, then 0x30/0x20/0x10) and the
    /// left-edge strength into the low nibble (4/3/2/1); zero means no
    /// filtering. `deblock_mode` 1 disables the filter entirely, mode 2
    /// skips slice-boundary edges.
    pub fn fill_deblock(&mut self, frefs: &SliceRefs, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
            return;
        }

        self.deblock = [0; 16];

        let tx8x8 = self.get_cur_mb().transform_8x8;

        let cur_intra  = self.get_cur_mb().mb_type.is_intra();
        let left_intra = self.get_left_mb().mb_type.is_intra();
        // running "top neighbour is intra" flag; rows 1..3 have the current
        // macroblock as their top neighbour
        let mut top_intra = self.get_top_mb().mb_type.is_intra();
        for y in 0..4 {
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            // with the 8x8 transform only even horizontal edges are filtered
            if can_do_top && (!tx8x8 || (y & 1) == 0) {
                if is_s || cur_intra || top_intra {
                    // intra/S-slice edges get the strongest filtering,
                    // extra strong on the macroblock boundary itself
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock[y * 4..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock[y * 4 + x] |= 0x20;
                        } else {
                            // weakest strength when only the motion differs
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || !frefs.cmp_refs(cur_ref, top_ref) {
                                self.deblock[y * 4 + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            // running "left neighbour is intra" flag; columns 1..3 have the
            // current macroblock as their left neighbour
            let mut lleft_intra = left_intra;
            for x in 0..4 {
                // with the 8x8 transform only even vertical edges are filtered
                let skip_8 = tx8x8 && (x & 1) != 0;
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if !can_do_left {
                    continue;
                }
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                if skip_8 {
                } else if is_s || cur_intra || lleft_intra {
                    self.deblock[y * 4 + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock[y * 4 + x] |= 2;
                } else {
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || !frefs.cmp_refs(cur_ref, left_ref) {
                        self.deblock[y * 4 + x] |= 1;
                    }
                }
                lleft_intra = cur_intra;
            }
            top_intra = cur_intra;
        }
    }
    /// Advances to the next macroblock in raster order, updating the
    /// neighbour-availability flags and rolling the caches over at the end
    /// of each macroblock row.
    pub fn next_mb(&mut self) {
        self.mb_x += 1;
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb_x = 0;
            self.mb_y += 1;
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();

            self.has_left = false;
        }
        // the top neighbour exists once we are a full row past the slice start
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    }
    /// Index of the current macroblock in the `mb` cache.
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    /// Index of 8x8 block `blk_no` (0..3, raster order) in the `blk8` cache.
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    /// Index of 4x4 block `blk_no` (0..15, raster order) in the `blk4` cache.
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    /// Mutable access to the current macroblock data.
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    /// Data of the left neighbour macroblock.
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    /// Data of the top neighbour macroblock.
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    /// Mutable access to 8x8 block `blk_no` of the current macroblock.
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    /// 8x8 block to the left of block `blk_no`.
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    /// 8x8 block above block `blk_no`.
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    /// Mutable access to 4x4 block `blk_no` of the current macroblock.
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    /// 4x4 block to the left of block `blk_no`.
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    /// 4x4 block above block `blk_no`.
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }

    /// Applies `f` to all four 8x8 blocks of the current macroblock.
    pub fn apply_to_blk8<F: (Fn(&mut Blk8Data))>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    /// Applies `f` to all sixteen 4x4 blocks of the current macroblock.
    pub fn apply_to_blk4<F: (Fn(&mut Blk4Data))>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }

    /// Sets the same intra prediction mode for the whole macroblock.
    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    /// Sets the same coded-coefficient count for all blocks of the macroblock.
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    /// Invalidates the reference indices of the current macroblock.
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }

    /// Derives the CABAC context pair for coding an MV difference from the
    /// clamped absolute MVDs of the left and top neighbours.
    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        // yoff is in pixels; since a blk4 row is four indices wide,
        // xoff / 4 + (yoff / 4) * 4 simplifies to xoff / 4 + yoff
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
        (ctx0, ctx1)
    }
    /// Derives the CABAC context for coding a reference index from the
    /// left and top neighbour references.
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let mut ctx = 0;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref  = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
            ctx += 1;
        }
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
            ctx += 2;
        }
        ctx
    }
    /// Predicts the motion vector for a partition from the left (A), top (B)
    /// and top-right (C) neighbours, adds the decoded difference and stores
    /// MV and reference index for the whole partition.
    #[allow(clippy::if_same_then_else)]
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        // top-right neighbour of the partition in the blk8 grid
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        // fall back to the top-left neighbour when C is unavailable
        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        // directional shortcuts for 16x8/8x16 partitions, then single-match
        // neighbour selection, then component-wise median
        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
            mv_b
        } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
            mv_c
        } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
            mv_a
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
    /// Derives and stores the motion vector for a P-skip macroblock
    /// (zero reference; zero MV when a neighbour is missing or already
    /// a zero-MV zero-reference block, median prediction otherwise).
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
            ZERO_MV
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
    /// Fills the whole macroblock with direct-mode motion, honouring the
    /// `direct_8x8` inference flag and taking a shortcut when the
    /// co-located macroblock uses a single 16x16 reference.
    pub fn predict_direct_mb(&mut self, frame_refs: &SliceRefs, temporal_mv: bool, direct_8x8: bool, cur_id: u16) {
        let (col_mb, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if direct_8x8 {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &col_mb, r1_poc, r1_long, temporal_mv, cur_id, BLK4_TO_D8[blk4]);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        } else if col_mb.mb_type.is_16x16_ref() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &col_mb, r1_poc, r1_long, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &col_mb, r1_poc, r1_long, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
    /// Fills a single 4x4 block with direct-mode motion (used for direct
    /// sub-partitions of B8x8 macroblocks).
    pub fn predict_direct_sub(&mut self, frame_refs: &SliceRefs, temporal_mv: bool, direct8x8: bool, cur_id: u16, blk4: usize) {
        let src_blk = if !direct8x8 { blk4 } else { BLK4_TO_D8[blk4] };
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &mbi, r1_poc, r1_long, temporal_mv, cur_id, src_blk);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
    /// Derives direct-mode motion for one 4x4 block from the co-located
    /// macroblock `mbi`, returning (list0 MV, list0 ref, list1 MV, list1 ref).
    ///
    /// Temporal mode scales the co-located MV by the POC distances
    /// (td/tb/tx as in the H.264 specification); spatial mode performs a
    /// macroblock-wide median prediction, zeroing MVs where the co-located
    /// block is a stationary zero-reference block.
    #[allow(clippy::nonminimal_bool)]
    pub fn get_direct_mv(&self, frame_refs: &SliceRefs, mbi: &FrameMBInfo, r1_poc: u16, r1_long: bool, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let blk8 = blk4_to_blk8(blk4);
        // pick the co-located block's list 0 data, falling back to list 1
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
            (ZERO_MV, MISSING_POC, MISSING_REF)
        } else if mbi.ref_poc[blk8][0] != MISSING_POC {
            (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
        } else {
            (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
        };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).max(-128).min(127);
            if r0_long || td == 0 {
                // long-term reference or zero distance: use the MV unscaled
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).max(-128).min(127);
                let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
                let mv0 = MV {
                    x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                    y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                };
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            // fall back to the top-left neighbour when C is unavailable
            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            // per-list minimum reference index over the three neighbours
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            // "co-located is stationary" test: zero short-term reference
            // with an at-most-one-quarter-pel MV
            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
                col_zero = false;
            }
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {
                col_zero = false;
            }
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                        mv_a[ref_l]
                    } else {
                        let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                        if count == 1 {
                            if ref_a[ref_l] == ref_idx {
                                mv_a[ref_l]
                            } else if ref_b[ref_l] == ref_idx {
                                mv_b[ref_l]
                            } else {
                                mv_c[ref_l]
                            }
                        } else {
                            MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                        }
                    };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
    /// Stores `mv` for list `ref_l` in every 4x4 block of the partition.
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mv[ref_l] = mv;
            }
        }
    }
    /// Stores the clamped absolute MV difference (for CABAC context
    /// selection) in every 4x4 block of the partition.
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
            }
        }
    }
    /// Stores `ref_idx` for list `ref_l` in every 8x8 block of the
    /// partition; sub-8x8 partitions write only their containing 8x8 block.
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
        } else {
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
                }
            }
        }
    }
}
890
891fn mvdiff4(mv1: MV, mv2: MV) -> bool {
892 let mv = mv1 - mv2;
893 (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
894}