h264: fix direct 8x8 inference mode
[nihav.git] / nihav-itu / src / codecs / h264 / types.rs
CommitLineData
22de733b 1use nihav_core::frame::NASimpleVideoFrame;
696e4e20
KS
2use nihav_codec_support::codecs::{MV, ZERO_MV};
3use nihav_codec_support::data::GenericCache;
4use super::FrameRefs;
5
/// Prediction direction of a macroblock or partition in a B slice.
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    L0, // predicted from reference list 0 only
    L1, // predicted from reference list 1 only
    Bi, // bidirectional: both list 0 and list 1 are used
}
13
/// Macroblock type as parsed from the bitstream.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    // intra types
    Intra4x4,
    Intra8x8,
    Intra16x16(u8, u8, u8), // (intra prediction mode, chroma CBP, luma CBP)
    PCM,

    // P-slice inter types (width x height of partitions)
    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0, // P8x8 with all reference indices forced to 0
    PSkip,

    // B-slice inter types with per-partition prediction direction
    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}
35
36impl MBType {
37 pub fn is_intra(self) -> bool {
38 match self {
39 MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM => true,
40 _ => false,
41 }
42 }
43 pub fn is_intra16x16(self) -> bool {
44 if let MBType::Intra16x16(_, _, _) = self {
45 true
46 } else {
47 false
48 }
49 }
50 pub fn is_skip(self) -> bool {
51 match self {
52 MBType::PSkip | MBType::BSkip => true,
53 _ => false,
54 }
55 }
56 pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
57 pub fn is_l0(self, part: usize) -> bool {
58 match self {
59 MBType::B16x16(mode) => mode == BMode::L0,
60 MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
61 if part == 0 {
62 mode0 == BMode::L0
63 } else {
64 mode1 == BMode::L0
65 }
66 },
67 MBType::Direct | MBType::BSkip => false,
68 _ => true,
69 }
70 }
71 pub fn is_l1(self, part: usize) -> bool {
72 match self {
73 MBType::B16x16(mode) => mode == BMode::L1,
74 MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
75 if part == 0 {
76 mode0 == BMode::L1
77 } else {
78 mode1 == BMode::L1
79 }
80 },
81 _ => false,
82 }
83 }
84 pub fn num_parts(self) -> usize {
85 match self {
86 MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
87 MBType::PSkip |
88 MBType::Direct | MBType::BSkip
89 => 1,
90 MBType::P16x16 |
91 MBType::B16x16(_)
92 => 1,
93 MBType::P16x8 | MBType::P8x16 |
94 MBType::B16x8(_, _) | MBType::B8x16(_, _)
95 => 2,
96 _ => 4,
97 }
98 }
99 pub fn size(self) -> (usize, usize) {
100 match self {
101 MBType::Intra4x4 |
102 MBType::Intra8x8 |
103 MBType::Intra16x16(_, _, _) |
104 MBType::PCM |
105 MBType::P16x16 |
106 MBType::PSkip |
107 MBType::Direct |
108 MBType::B16x16(_) |
109 MBType::BSkip
110 => (16, 16),
111 MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
112 MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
113 _ => (8, 8),
114 }
115 }
116}
117
impl Default for MBType {
    // Intra4x4 serves as the initial value before a macroblock is decoded.
    fn default() -> Self { MBType::Intra4x4 }
}
121
/// Sub-macroblock (8x8 partition) type used by P8x8/B8x8 macroblocks.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    Direct8x8, // B-slice direct-predicted 8x8 block
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}
134
135impl SubMBType {
136 pub fn num_parts(self) -> usize {
137 match self {
138 SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
139 SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
140 _ => 2,
141 }
142 }
143 pub fn size(self) -> (usize, usize) {
144 match self {
145 SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
146 SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
147 SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
148 SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
149 }
150 }
151 pub fn is_l0(self) -> bool {
152 match self {
153 SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
154 SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
155 mode == BMode::L0
156 },
157 _ => true,
158 }
159 }
160 pub fn is_l1(self) -> bool {
161 match self {
162 SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
163 SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
164 mode == BMode::L1
165 },
166 _ => false,
167 }
168 }
169}
170
impl Default for SubMBType {
    // Direct8x8 serves as the initial value before a sub-block is decoded.
    fn default() -> Self { SubMBType::Direct8x8 }
}
174
/// Payload-free macroblock type kept in the decoding caches
/// (a compact counterpart of [`MBType`]).
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    None, // no macroblock decoded at this position yet
}
199
200impl CompactMBType {
201 pub fn is_intra(self) -> bool {
202 match self {
203 CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16 => true,
204 _ => false,
205 }
206 }
207 pub fn is_intra16orpcm(self) -> bool {
208 match self {
209 CompactMBType::Intra16x16 | CompactMBType::PCM => true,
210 _ => false,
211 }
212 }
213 pub fn is_skip(self) -> bool {
214 match self {
215 CompactMBType::PSkip | CompactMBType::BSkip => true,
216 _ => false,
217 }
218 }
219 pub fn is_direct(self) -> bool {
220 match self {
221 CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None => true,
222 _ => false,
223 }
224 }
225 pub fn is_inter(self) -> bool {
226 !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
227 }
495b7ec0 228 pub fn is_16x16_ref(self) -> bool {
696e4e20 229 match self {
495b7ec0
KS
230 CompactMBType::Intra4x4 |
231 CompactMBType::Intra8x8 |
232 CompactMBType::Intra16x16 |
233 CompactMBType::PCM |
234 CompactMBType::P16x16 |
235 CompactMBType::B16x16 => true,
236 _ => false,
696e4e20
KS
237 }
238 }
239}
240
impl Default for CompactMBType {
    // None marks cache entries that hold no decoded macroblock.
    fn default() -> Self { CompactMBType::None }
}
244
245impl From<MBType> for CompactMBType {
246 fn from(mbtype: MBType) -> Self {
247 match mbtype {
248 MBType::Intra4x4 => CompactMBType::Intra4x4,
249 MBType::Intra8x8 => CompactMBType::Intra8x8,
250 MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
251 MBType::PCM => CompactMBType::PCM,
252 MBType::P16x16 => CompactMBType::P16x16,
253 MBType::P16x8 => CompactMBType::P16x8,
254 MBType::P8x16 => CompactMBType::P8x16,
255 MBType::P8x8 => CompactMBType::P8x8,
256 MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
257 MBType::PSkip => CompactMBType::PSkip,
258 MBType::Direct => CompactMBType::Direct,
259 MBType::B16x16(_) => CompactMBType::B16x16,
260 MBType::B16x8(_, _) => CompactMBType::B16x8,
261 MBType::B8x16(_, _) => CompactMBType::B8x16,
262 MBType::B8x8 => CompactMBType::B8x8,
263 MBType::BSkip => CompactMBType::BSkip,
264 }
265 }
266}
267
/// 4x4 intra prediction mode; the variant order matches the
/// H.264 mode numbering 0..=8, with `None` meaning "not set".
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}
282
283impl IntraPredMode {
284 pub fn is_none(self) -> bool { self == IntraPredMode::None }
285 pub fn into_pred_idx(self) -> i8 {
286 if !self.is_none() {
287 self as u8 as i8
288 } else {
289 -1
290 }
291 }
292}
293
impl Default for IntraPredMode {
    // None marks blocks whose prediction mode is not known yet.
    fn default() -> Self { IntraPredMode::None }
}
297
298impl From<u8> for IntraPredMode {
299 fn from(val: u8) -> Self {
300 match val {
301 0 => IntraPredMode::Vertical,
302 1 => IntraPredMode::Horizontal,
303 2 => IntraPredMode::DC,
304 3 => IntraPredMode::DiagDownLeft,
305 4 => IntraPredMode::DiagDownRight,
306 5 => IntraPredMode::VerRight,
307 6 => IntraPredMode::HorDown,
308 7 => IntraPredMode::VerLeft,
309 8 => IntraPredMode::HorUp,
310 _ => IntraPredMode::None,
311 }
312 }
313}
314
315impl Into<u8> for IntraPredMode {
316 fn into(self) -> u8 {
317 match self {
318 IntraPredMode::Vertical => 0,
319 IntraPredMode::Horizontal => 1,
320 IntraPredMode::DC => 2,
321 IntraPredMode::DiagDownLeft => 3,
322 IntraPredMode::DiagDownRight => 4,
323 IntraPredMode::VerRight => 5,
324 IntraPredMode::HorDown => 6,
325 IntraPredMode::VerLeft => 7,
326 IntraPredMode::HorUp => 8,
327 _ => 9,
328 }
329 }
330}
331
/// POC value standing for "no reference picture".
pub const MISSING_POC: u16 = 0xFFFF;

/// Compact reference picture index with an embedded "direct-predicted" flag bit.
#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8 // low bits: list index; bit 0x40: direct flag; 0xFE/0xFF: special
}

pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF }; // neighbour not present
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE }; // present but not usable
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
// Bit marking a reference that was filled in by direct-mode prediction.
const DIRECT_FLAG: u8 = 0x40;
343
344impl PicRef {
345 pub fn new(ref_idx: u8) -> Self {
346 Self { ref_idx }
347 }
348 pub fn not_avail(self) -> bool {
349 self == MISSING_REF || self == INVALID_REF
350 }
351 pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
352 pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
353 pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
354 fn min_pos(self, other: Self) -> Self {
355 match (self.not_avail(), other.not_avail()) {
356 (true, true) => self,
357 (false, true) => self,
358 (true, false) => other,
359 (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
360 }
361 }
362}
363
impl Default for PicRef {
    // Cache entries start out with no reference attached.
    fn default() -> Self { MISSING_REF }
}
367
impl PartialEq for PicRef {
    /// Compares only the reference index: the direct flag is forced on
    /// for both sides so it never affects equality.
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}
373
374impl std::fmt::Display for PicRef {
375 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
376 if *self == MISSING_REF {
377 write!(f, "-1")
378 } else if *self == INVALID_REF {
379 write!(f, "-2")
380 } else {
381 write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
382 }
383 }
384}
385
/// Per-macroblock data kept in the decoding cache.
#[derive(Clone,Copy,Default)]
pub struct MBData {
    pub mb_type: CompactMBType,
    pub cbp: u8,            // coded block pattern
    pub coded_flags: u32,   // per-block coded flags (used by CABAC contexts — TODO confirm)
    pub cmode: u8,          // chroma intra prediction mode — TODO confirm
    pub qp_y: u8,           // luma quantiser
    pub qp_u: u8,           // chroma U quantiser
    pub qp_v: u8,           // chroma V quantiser
    pub transform_8x8: bool, // macroblock uses the 8x8 transform
}
397
/// Maps a 4x4 block index (raster order inside the macroblock) to the index
/// of the 8x8 block (2x2 layout) that contains it.
pub fn blk4_to_blk8(blk4: usize) -> usize {
    let pos = blk4 & 0xF;
    // column pair (bit 1 of x) plus row pair (y >= 2) times two
    ((pos & 3) >> 1) + ((pos >> 3) << 1)
}
402
/// Per-8x8-block data kept in the decoding cache.
#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2], // reference index per prediction list
    pub ncoded_c: [u8; 2],    // coded chroma coefficient counts per plane — TODO confirm
}
408
impl Default for Blk8Data {
    // Blocks start with no references and no coded chroma coefficients.
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}
417
/// Per-4x4-block data kept in the decoding cache.
#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ncoded: u8,            // number of coded coefficients in the block
    pub ipred: IntraPredMode,  // intra prediction mode (for mode prediction)
    pub mv: [MV; 2],           // motion vector per prediction list
    pub mvd: [MV; 2],          // clamped |MV difference|, used for context selection
}
425
/// Per-slice decoding state: macroblock position, neighbour caches and
/// the pixel context needed for intra prediction and deblocking.
pub struct SliceState {
    pub mb_x: usize,
    pub mb_y: usize,
    pub mb_w: usize,
    pub mb_h: usize,
    pub mb_start: usize, // index of the first macroblock of the slice

    // sliding-window caches holding the current row plus neighbours
    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

    // per-4x4-edge deblocking strength flags for the current macroblock
    // (top-edge flags in the high nibble, left-edge in the low nibble)
    pub deblock: [u8; 16],

    pub has_top: bool,
    pub has_left: bool,

    // reconstructed pixels bordering the current macroblock row,
    // used as the intra prediction context
    pub top_line_y: Vec<u8>,
    pub left_y: [u8; 17], // first element is top-left
    pub top_line_c: [Vec<u8>; 2],
    pub left_c: [[u8; 9]; 2],
}
447
d85f94f7
KS
// For direct 8x8 inference: maps each 4x4 block to the corner 4x4 block
// of its 8x8 partition whose data is used for the whole partition.
const BLK4_TO_D8: [usize; 16] = [ 0, 0, 3, 3, 0, 0, 3, 3, 12, 12, 15, 15, 12, 12, 15, 15 ];
449
696e4e20
KS
450impl SliceState {
    /// Creates an empty slice state; call [`Self::reset`] before use.
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            // zero-sized caches; the real dimensions are set in reset()
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: [0; 16],

            has_top: false,
            has_left: false,

            top_line_y: Vec::new(),
            left_y: [0; 17],
            top_line_c: [Vec::new(), Vec::new()],
            left_c: [[0; 9]; 2],
        }
    }
473 pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
474 self.mb_w = mb_w;
475 self.mb_h = mb_h;
476 self.mb_start = mb_pos;
477 if mb_w > 0 {
478 self.mb_x = mb_pos % mb_w;
479 self.mb_y = mb_pos / mb_w;
480 } else {
481 self.mb_x = 0;
482 self.mb_y = 0;
483 }
484 self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
485 self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
486 self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());
487
696e4e20
KS
488 self.has_top = false;
489 self.has_left = false;
22de733b
KS
490
491 self.top_line_y.resize(mb_w * 16 + 1, 0x80);
492 self.top_line_c[0].resize(mb_w * 8 + 1, 0x80);
493 self.top_line_c[1].resize(mb_w * 8 + 1, 0x80);
494 self.left_y = [0x80; 17];
495 self.left_c = [[0x80; 9]; 2];
696e4e20 496 }
22de733b
KS
    /// Saves the bottom row and right column of the just-reconstructed
    /// macroblock so they can serve as top/left intra prediction context
    /// for the following macroblocks.
    pub fn save_ipred_context(&mut self, frm: &NASimpleVideoFrame<u8>) {
        let dstoff = self.mb_x * 16;
        let srcoff = frm.offset[0] + self.mb_x * 16 + self.mb_y * 16 * frm.stride[0];
        // the old top-line pixel becomes the new top-left corner sample
        self.left_y[0] = self.top_line_y[dstoff + 15];
        // bottom row of the macroblock becomes the top line for the row below
        self.top_line_y[dstoff..][..16].copy_from_slice(&frm.data[srcoff + frm.stride[0] * 15..][..16]);
        // rightmost column becomes the left context for the next macroblock
        for (dst, src) in self.left_y[1..].iter_mut().zip(frm.data[srcoff..].chunks(frm.stride[0])) {
            *dst = src[15];
        }
        // same procedure for both chroma planes (8x8 blocks)
        for chroma in 0..2 {
            let cstride = frm.stride[chroma + 1];
            let dstoff = self.mb_x * 8;
            let srcoff = frm.offset[chroma + 1] + self.mb_x * 8 + self.mb_y * 8 * cstride;
            self.left_c[chroma][0] = self.top_line_c[chroma][dstoff + 7];
            self.top_line_c[chroma][dstoff..][..8].copy_from_slice(&frm.data[srcoff + cstride * 7..][..8]);
            for (dst, src) in self.left_c[chroma][1..].iter_mut().zip(frm.data[srcoff..].chunks(cstride)) {
                *dst = src[7];
            }
        }
    }
    /// Computes per-edge deblocking strength flags for the current macroblock.
    ///
    /// Each `deblock[y * 4 + x]` entry holds top-edge flags in the high nibble
    /// (0x40 strongest at the MB boundary, 0x30 intra inner edge, 0x20 coded
    /// coefficients, 0x10 MV/reference difference) and left-edge flags in the
    /// low nibble (4/3/2/1 with the same meanings). `deblock_mode` 1 disables
    /// deblocking entirely; mode 2 restricts filtering across slice borders.
    /// `is_s` marks SI/SP slices which always use the intra strengths —
    /// TODO confirm.
    pub fn fill_deblock(&mut self, frefs: &FrameRefs, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
            return;
        }

        self.deblock = [0; 16];

        // with the 8x8 transform only every second edge gets filtered
        let tx8x8 = self.get_cur_mb().transform_8x8;

        let cur_mbt = self.get_cur_mb().mb_type;
        let left_mbt = self.get_left_mb().mb_type;
        let mut top_mbt = self.get_top_mb().mb_type;
        for y in 0..4 {
            // top edge of row 0 crosses the MB border; filter it only when the
            // neighbour exists and slice-border filtering is allowed
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top && (!tx8x8 || (y & 1) == 0) {
                if is_s || cur_mbt.is_intra() || top_mbt.is_intra() {
                    // intra: strongest filtering at the MB border, strong inside
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock[y * 4..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock[y * 4 + x] |= 0x20;
                        } else {
                            // compare MVs and references across the edge
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || !frefs.cmp_refs(cur_ref, top_ref) {
                                self.deblock[y * 4 + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            let mut lleft_mbt = left_mbt;
            for x in 0..4 {
                let skip_8 = tx8x8 && (x & 1) != 0;
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if !can_do_left {
                    continue;
                }
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                if skip_8 {
                    // inner edge skipped for the 8x8 transform
                } else if is_s || cur_mbt.is_intra() || lleft_mbt.is_intra() {
                    // intra: strongest at the MB border (x == 0), strong inside
                    self.deblock[y * 4 + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock[y * 4 + x] |= 2;
                } else {
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || !frefs.cmp_refs(cur_ref, left_ref) {
                        self.deblock[y * 4 + x] |= 1;
                    }
                }
                // after the first column the "left" neighbour is this MB itself
                lleft_mbt = cur_mbt;
            }
            top_mbt = cur_mbt;
        }
    }
580 pub fn next_mb(&mut self) {
581 self.mb_x += 1;
582 self.has_left = true;
583 if self.mb_x == self.mb_w {
584 self.mb_x = 0;
585 self.mb_y += 1;
586 self.mb.update_row();
587 self.blk8.update_row();
588 self.blk4.update_row();
589
696e4e20
KS
590 self.has_left = false;
591 }
592 self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
593 }
    /// Index of the current macroblock in the MB cache.
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    /// Index of 8x8 block `blk_no` (2x2 layout) of the current MB in the blk8 cache.
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    /// Index of 4x4 block `blk_no` (4x4 raster layout) of the current MB in the blk4 cache.
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    /// Mutable access to the current macroblock data.
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    /// Data of the macroblock to the left of the current one.
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    /// Data of the macroblock above the current one.
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    /// Mutable access to 8x8 block `blk_no` of the current macroblock.
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    /// 8x8 block to the left of block `blk_no`.
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    /// 8x8 block above block `blk_no`.
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    /// Mutable access to 4x4 block `blk_no` of the current macroblock.
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    /// 4x4 block to the left of block `blk_no`.
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    /// 4x4 block above block `blk_no`.
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }
631
    /// Applies `f` to all four 8x8 blocks of the current macroblock.
    pub fn apply_to_blk8<F: (Fn(&mut Blk8Data))>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    /// Applies `f` to all sixteen 4x4 blocks of the current macroblock.
    pub fn apply_to_blk4<F: (Fn(&mut Blk4Data))>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }
648
    /// Sets the intra prediction mode on every 4x4 block of the current MB.
    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    /// Sets the coded-coefficient count on every luma and chroma block entry.
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    /// Invalidates both reference lists for the current macroblock.
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }
659
660 pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
661 let blk_no = xoff / 4 + yoff;
662 let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
663 let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
664 let mv = mv_a + mv_b;
665 let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
666 let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
667 (ctx0, ctx1)
668 }
669 pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
670 let blk_no = xoff / 8 + (yoff / 8) * 2;
671 let mut ctx = 0;
672 let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
673 let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
674 if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
675 ctx += 1;
676 }
677 if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
678 ctx += 2;
679 }
680 ctx
681 }
    #[allow(clippy::if_same_then_else)]
    /// Predicts the motion vector for the partition at (`xpos`, `ypos`) of
    /// size `bw`x`bh` in list `ref_l`, adds the decoded difference `diff_mv`
    /// and stores the resulting MV and `ref_idx` into the caches.
    ///
    /// Neighbours: A = left, B = top, C = top-right (replaced by top-left D
    /// when unavailable). 16x8 and 8x16 partitions use the directional
    /// shortcut rules; otherwise the median (or the single matching
    /// neighbour) is used.
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        // 8x8-granular index of neighbour C (block above and to the right)
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        // offsets to the 8x8 entries holding the A and B references
        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        // fall back from C to D (top-left) when C is missing or out of reach
        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
                // top 16x8 partition: take B when it shares the reference
                mv_b
            } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
                // bottom 16x8 partition: take A when it shares the reference
                mv_a
            } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
                // left 8x16 partition
                mv_a
            } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
                // right 8x16 partition
                mv_c
            } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
                // only the left neighbour exists
                mv_a
            } else {
                // use the single neighbour with the matching reference,
                // otherwise the component-wise median of A, B and C
                let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if count == 1 {
                    if ref_a == ref_idx {
                        mv_a
                    } else if ref_b == ref_idx {
                        mv_b
                    } else {
                        mv_c
                    }
                } else {
                    MV::pred(mv_a, mv_b, mv_c)
                }
            };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
    /// Derives the motion vector for a P-skip macroblock: zero MV when a
    /// neighbour is missing or already zero with reference 0, otherwise the
    /// regular list-0 prediction; fills the whole 16x16 block.
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        // fall back from top-right to top-left neighbour when missing
        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
                // skip conditions: missing or static neighbour means zero MV
                ZERO_MV
            } else {
                // standard prediction: single matching neighbour or median
                let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if count == 1 {
                    if ref_a == ref_idx {
                        mv_a
                    } else if ref_b == ref_idx {
                        mv_b
                    } else {
                        mv_c
                    }
                } else {
                    MV::pred(mv_a, mv_b, mv_c)
                }
            };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
    /// Fills the whole macroblock with direct-mode motion data.
    ///
    /// With `direct_8x8` (direct 8x8 inference) every 4x4 block copies the
    /// data derived for the corner block of its 8x8 partition; otherwise
    /// either one prediction covers the whole MB (spatial mode or a 16x16
    /// colocated MB) or each 4x4 block is derived separately.
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, direct_8x8: bool, cur_id: u16) {
        let (col_mb, _, _) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if direct_8x8 {
            for blk4 in 0..16 {
                // use the corner 4x4 block of the containing 8x8 partition
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, BLK4_TO_D8[blk4]);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        } else if col_mb.mb_type.is_16x16_ref() || !temporal_mv {
            // one prediction for the entire macroblock
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
d85f94f7
KS
    /// Fills a single 4x4 block with direct-mode motion data, honouring
    /// direct 8x8 inference (`direct8x8`) by sourcing the corner block of
    /// the containing 8x8 partition.
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, direct8x8: bool, cur_id: u16, blk4: usize) {
        let src_blk = if !direct8x8 { blk4 } else { BLK4_TO_D8[blk4] };
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, src_blk);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
    /// Derives the direct-mode motion vectors and references for 4x4 block
    /// `blk4`, returning (list0 MV, list0 ref, list1 MV, list1 ref).
    ///
    /// Temporal mode scales the colocated picture's MV by the POC distance
    /// ratio; spatial mode predicts one MV pair for the whole macroblock from
    /// the spatial neighbours, zeroing lists whose colocated motion is static.
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let blk8 = blk4_to_blk8(blk4);
        // pick the colocated MV/reference, preferring list 0
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
                (ZERO_MV, MISSING_POC, MISSING_REF)
            } else if mbi.ref_poc[blk8][0] != MISSING_POC {
                (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
            } else {
                (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
            };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            // scale the colocated MV by the ratio of POC distances
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).max(-128).min(127);
            if r0_long || td == 0 {
                // long-term reference or zero distance: copy the MV unscaled
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).max(-128).min(127);
                let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
                let mv0 = MV {
                        x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                        y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                    };
                // list1 MV points from the current picture back to the colocated one
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            // spatial neighbours A (left), B (top) and C (top-right)
            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            // fall back from C to the top-left neighbour when missing
            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            // per list, take the lowest available reference index
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                // no usable neighbour references at all
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            // colocated block counts as static when it uses short-term ref 0
            // and its MV is within +/-1 in both components
            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
                col_zero = false;
            }
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {
                col_zero = false;
            }
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                // keep zero MV when the reference is ZERO_REF over a static
                // colocated block (unless the colocated MB is intra)
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                            mv_a[ref_l]
                        } else {
                            let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                            if count == 1 {
                                if ref_a[ref_l] == ref_idx {
                                    mv_a[ref_l]
                                } else if ref_b[ref_l] == ref_idx {
                                    mv_b[ref_l]
                                } else {
                                    mv_c[ref_l]
                                }
                            } else {
                                MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                            }
                        };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
882 pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
883 let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
884 for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
885 for blk in row[..bw / 4].iter_mut() {
886 blk.mv[ref_l] = mv;
887 }
888 }
889 }
890 pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
891 let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
892 let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
893 for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
894 for blk in row[..bw / 4].iter_mut() {
895 blk.mvd[ref_l] = mvd;
896 }
897 }
898 }
899 pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
900 let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
901 if bw < 8 || bh < 8 {
902 self.blk8.data[start].ref_idx[ref_l] = ref_idx;
903 } else {
904 for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
905 for blk in row[..bw / 8].iter_mut() {
906 blk.ref_idx[ref_l] = ref_idx;
907 }
908 }
909 }
910 }
911}
912
913fn mvdiff4(mv1: MV, mv2: MV) -> bool {
914 let mv = mv1 - mv2;
915 (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
916}