h264: rework MB reconstruction and add weighted MC
[nihav.git] nihav-itu/src/codecs/h264/types.rs
use nihav_core::frame::NASimpleVideoFrame;
use nihav_codec_support::codecs::{MV, ZERO_MV};
use nihav_codec_support::data::GenericCache;
use super::FrameRefs;

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    L0,
    L1,
    Bi,
}

#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    Intra4x4,
    Intra8x8,
    Intra16x16(u8, u8, u8),
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}

impl MBType {
    pub fn is_intra(self) -> bool {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM => true,
            _ => false,
        }
    }
    pub fn is_intra16x16(self) -> bool {
        if let MBType::Intra16x16(_, _, _) = self {
            true
        } else {
            false
        }
    }
    pub fn is_skip(self) -> bool {
        match self {
            MBType::PSkip | MBType::BSkip => true,
            _ => false,
        }
    }
    pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
    pub fn is_l0(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L0,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L0
                } else {
                    mode1 == BMode::L0
                }
            },
            MBType::Direct | MBType::BSkip => false,
            _ => true,
        }
    }
    pub fn is_l1(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L1,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L1
                } else {
                    mode1 == BMode::L1
                }
            },
            _ => false,
        }
    }
    pub fn num_parts(self) -> usize {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
            MBType::PSkip |
            MBType::Direct | MBType::BSkip
                => 1,
            MBType::P16x16 |
            MBType::B16x16(_)
                => 1,
            MBType::P16x8 | MBType::P8x16 |
            MBType::B16x8(_, _) | MBType::B8x16(_, _)
                => 2,
            _ => 4,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            MBType::Intra4x4 |
            MBType::Intra8x8 |
            MBType::Intra16x16(_, _, _) |
            MBType::PCM |
            MBType::P16x16 |
            MBType::PSkip |
            MBType::Direct |
            MBType::B16x16(_) |
            MBType::BSkip
                => (16, 16),
            MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
            MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
            _ => (8, 8),
        }
    }
}

impl Default for MBType {
    fn default() -> Self { MBType::Intra4x4 }
}

#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    Direct8x8,
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}

impl SubMBType {
    pub fn num_parts(self) -> usize {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
            SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
            _ => 2,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
            SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
            SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
            SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
        }
    }
    pub fn is_l0(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
                mode == BMode::L0
            },
            _ => true,
        }
    }
    pub fn is_l1(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
                mode == BMode::L1
            },
            _ => false,
        }
    }
}

impl Default for SubMBType {
    fn default() -> Self { SubMBType::Direct8x8 }
}

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    None,
}

impl CompactMBType {
    pub fn is_intra(self) -> bool {
        match self {
            CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16 => true,
            _ => false,
        }
    }
    pub fn is_intra16orpcm(self) -> bool {
        match self {
            CompactMBType::Intra16x16 | CompactMBType::PCM => true,
            _ => false,
        }
    }
    pub fn is_skip(self) -> bool {
        match self {
            CompactMBType::PSkip | CompactMBType::BSkip => true,
            _ => false,
        }
    }
    pub fn is_direct(self) -> bool {
        match self {
            CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None => true,
            _ => false,
        }
    }
    pub fn is_inter(self) -> bool {
        !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
    }
    pub fn is_16x16_ref(self) -> bool {
        match self {
            CompactMBType::Intra4x4 |
            CompactMBType::Intra8x8 |
            CompactMBType::Intra16x16 |
            CompactMBType::PCM |
            CompactMBType::P16x16 |
            CompactMBType::B16x16 => true,
            _ => false,
        }
    }
}

impl Default for CompactMBType {
    fn default() -> Self { CompactMBType::None }
}

impl From<MBType> for CompactMBType {
    fn from(mbtype: MBType) -> Self {
        match mbtype {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
        }
    }
}

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}

impl IntraPredMode {
    pub fn is_none(self) -> bool { self == IntraPredMode::None }
    pub fn into_pred_idx(self) -> i8 {
        if !self.is_none() {
            self as u8 as i8
        } else {
            -1
        }
    }
}

impl Default for IntraPredMode {
    fn default() -> Self { IntraPredMode::None }
}

impl From<u8> for IntraPredMode {
    fn from(val: u8) -> Self {
        match val {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
        }
    }
}

impl Into<u8> for IntraPredMode {
    fn into(self) -> u8 {
        match self {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
            _ => 9,
        }
    }
}

pub const MISSING_POC: u16 = 0xFFFF;

#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8
}

pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
const DIRECT_FLAG: u8 = 0x40;

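// PicRef packs a reference into one byte: the low bits hold the index into
// the reference list, bit 6 (DIRECT_FLAG) marks references obtained via
// direct prediction, and 0xFF/0xFE serve as the "missing"/"invalid"
// sentinels. Note that PartialEq below ORs DIRECT_FLAG into both operands
// first, so equality deliberately ignores the direct-prediction flag.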
impl PicRef {
    pub fn new(ref_idx: u8) -> Self {
        Self { ref_idx }
    }
    pub fn not_avail(self) -> bool {
        self == MISSING_REF || self == INVALID_REF
    }
    pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
    pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
    pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
    fn min_pos(self, other: Self) -> Self {
        match (self.not_avail(), other.not_avail()) {
            (true, true) => self,
            (false, true) => self,
            (true, false) => other,
            (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
        }
    }
}

impl Default for PicRef {
    fn default() -> Self { MISSING_REF }
}

impl PartialEq for PicRef {
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}

impl std::fmt::Display for PicRef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
            write!(f, "-1")
        } else if *self == INVALID_REF {
            write!(f, "-2")
        } else {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
        }
    }
}

#[derive(Clone,Copy,Default)]
pub struct MBData {
    pub mb_type: CompactMBType,
    pub cbp: u8,
    pub coded_flags: u32,
    pub cmode: u8,
    pub qp_y: u8,
    pub qp_u: u8,
    pub qp_v: u8,
    pub transform_8x8: bool,
}

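// Maps a 4x4 block index (raster order within the 16x16 macroblock) to the
// index of the 8x8 block containing it; e.g. blk4 index 6 lies in the
// top-right quadrant, so blk4_to_blk8(6) == 1.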
pub fn blk4_to_blk8(blk4: usize) -> usize {
    const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
    MAP[blk4 & 0xF]
}

#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
}

impl Default for Blk8Data {
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}

#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ncoded: u8,
    pub ipred: IntraPredMode,
    pub mv: [MV; 2],
    pub mvd: [MV; 2],
}

pub struct SliceState {
    pub mb_x: usize,
    pub mb_y: usize,
    pub mb_w: usize,
    pub mb_h: usize,
    pub mb_start: usize,

    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

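    // Deblocking strength per 4x4 block, as filled by fill_deblock(): the
    // high nibble describes the top edge (0x40 for an intra edge on the MB
    // border, then 0x30/0x20/0x10 for progressively weaker cases), the low
    // nibble the left edge (4/3/2/1 correspondingly).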
    pub deblock: [u8; 16],

    pub has_top: bool,
    pub has_left: bool,

    pub top_line_y: Vec<u8>,
    pub left_y: [u8; 17], // first element is top-left
    pub top_line_c: [Vec<u8>; 2],
    pub left_c: [[u8; 9]; 2],
}

impl SliceState {
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: [0; 16],

            has_top: false,
            has_left: false,

            top_line_y: Vec::new(),
            left_y: [0; 17],
            top_line_c: [Vec::new(), Vec::new()],
            left_c: [[0; 9]; 2],
        }
    }
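    // Reinitialises the state for a slice starting at MB position mb_pos.
    // The neighbour caches are allocated with extra edge entries (hence the
    // "+ 2" strides) so left/top lookups need no bounds checks, and the
    // intra prediction context lines are reset to 0x80, the usual
    // "unavailable neighbour" sample value.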
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_w = mb_w;
        self.mb_h = mb_h;
        self.mb_start = mb_pos;
        if mb_w > 0 {
            self.mb_x = mb_pos % mb_w;
            self.mb_y = mb_pos / mb_w;
        } else {
            self.mb_x = 0;
            self.mb_y = 0;
        }
        self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());

        self.has_top = false;
        self.has_left = false;

        self.top_line_y.resize(mb_w * 16 + 1, 0x80);
        self.top_line_c[0].resize(mb_w * 8 + 1, 0x80);
        self.top_line_c[1].resize(mb_w * 8 + 1, 0x80);
        self.left_y = [0x80; 17];
        self.left_c = [[0x80; 9]; 2];
    }
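    // Saves the bottom row and rightmost column of the just-reconstructed
    // macroblock; they become the top/left intra prediction context for the
    // macroblocks below and to the right.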
    pub fn save_ipred_context(&mut self, frm: &NASimpleVideoFrame<u8>) {
        let dstoff = self.mb_x * 16;
        let srcoff = frm.offset[0] + self.mb_x * 16 + self.mb_y * 16 * frm.stride[0];
        self.left_y[0] = self.top_line_y[dstoff + 15];
        self.top_line_y[dstoff..][..16].copy_from_slice(&frm.data[srcoff + frm.stride[0] * 15..][..16]);
        for (dst, src) in self.left_y[1..].iter_mut().zip(frm.data[srcoff..].chunks(frm.stride[0])) {
            *dst = src[15];
        }
        for chroma in 0..2 {
            let cstride = frm.stride[chroma + 1];
            let dstoff = self.mb_x * 8;
            let srcoff = frm.offset[chroma + 1] + self.mb_x * 8 + self.mb_y * 8 * cstride;
            self.left_c[chroma][0] = self.top_line_c[chroma][dstoff + 7];
            self.top_line_c[chroma][dstoff..][..8].copy_from_slice(&frm.data[srcoff + cstride * 7..][..8]);
            for (dst, src) in self.left_c[chroma][1..].iter_mut().zip(frm.data[srcoff..].chunks(cstride)) {
                *dst = src[7];
            }
        }
    }
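    // Calculates deblocking boundary strengths for the current macroblock.
    // deblock_mode follows disable_deblocking_filter_idc semantics: 1 turns
    // the filter off entirely, 2 keeps it but skips edges crossing slice
    // boundaries. Intra macroblocks get the strongest filtering, then edges
    // with coded residuals, then reference/motion mismatches (see mvdiff4()).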
    pub fn fill_deblock(&mut self, frefs: &FrameRefs, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
            return;
        }

        self.deblock = [0; 16];

        let tx8x8 = self.get_cur_mb().transform_8x8;

        let cur_mbt = self.get_cur_mb().mb_type;
        let left_mbt = self.get_left_mb().mb_type;
        let mut top_mbt = self.get_top_mb().mb_type;
        for y in 0..4 {
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top && (!tx8x8 || (y & 1) == 0) {
                if is_s || cur_mbt.is_intra() || top_mbt.is_intra() {
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock[y * 4..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock[y * 4 + x] |= 0x20;
                        } else {
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || !frefs.cmp_refs(cur_ref, top_ref) {
                                self.deblock[y * 4 + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            let mut lleft_mbt = left_mbt;
            for x in 0..4 {
                let skip_8 = tx8x8 && (x & 1) != 0;
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if !can_do_left {
                    continue;
                }
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                if skip_8 {
                } else if is_s || cur_mbt.is_intra() || lleft_mbt.is_intra() {
                    self.deblock[y * 4 + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock[y * 4 + x] |= 2;
                } else {
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || !frefs.cmp_refs(cur_ref, left_ref) {
                        self.deblock[y * 4 + x] |= 1;
                    }
                }
                lleft_mbt = cur_mbt;
            }
            top_mbt = cur_mbt;
        }
    }
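    // Advances to the next macroblock. has_top becomes true only once the
    // current MB is at least one full row past the first MB of the slice,
    // so prediction never reads context from above the slice start.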
    pub fn next_mb(&mut self) {
        self.mb_x += 1;
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb_x = 0;
            self.mb_y += 1;
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();

            self.has_left = false;
        }
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    }
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }

    pub fn apply_to_blk8<F: (Fn(&mut Blk8Data))>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    pub fn apply_to_blk4<F: (Fn(&mut Blk4Data))>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }

    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }

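    // CABAC context selection for decoding motion vector differences: the
    // bucket (0..2 per component) is chosen from the summed absolute MVDs
    // of the left and top neighbours, with thresholds at 3 and 32.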
    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
        (ctx0, ctx1)
    }
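    // CABAC context for the reference index: +1 if the left neighbour uses
    // a non-zero (non-direct) reference, +2 likewise for the top neighbour.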
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let mut ctx = 0;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
            ctx += 1;
        }
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
            ctx += 2;
        }
        ctx
    }
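    // Motion vector prediction for an inter partition (median prediction in
    // the spirit of H.264 8.4.1.3): candidates come from the left (A), top
    // (B) and top-right (C) neighbours, with the top-left block replacing C
    // when it is unavailable. 16x8 and 8x16 partitions prefer a directional
    // candidate with a matching reference, and when exactly one candidate
    // shares the target reference it is used as-is.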
    #[allow(clippy::if_same_then_else)]
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
                mv_b
            } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
                mv_a
            } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
                mv_a
            } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
                mv_c
            } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
                mv_a
            } else {
                let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if count == 1 {
                    if ref_a == ref_idx {
                        mv_a
                    } else if ref_b == ref_idx {
                        mv_b
                    } else {
                        mv_c
                    }
                } else {
                    MV::pred(mv_a, mv_b, mv_c)
                }
            };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
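    // MV prediction for P_Skip macroblocks: the motion vector is zero when
    // a neighbour is missing or when the left/top neighbour points at
    // reference 0 with a zero vector; otherwise the usual median applies.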
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
                ZERO_MV
            } else {
                let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if count == 1 {
                    if ref_a == ref_idx {
                        mv_a
                    } else if ref_b == ref_idx {
                        mv_b
                    } else {
                        mv_c
                    }
                } else {
                    MV::pred(mv_a, mv_b, mv_c)
                }
            };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
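    // Direct mode prediction for a whole B macroblock: in spatial direct
    // mode (or when the co-located MB is a single-reference 16x16 one) one
    // prediction covers all blocks, otherwise it is derived per 4x4 block.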
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16) {
        let (col_mb, _, _) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if col_mb.mb_type.is_16x16_ref() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) {
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
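    // Derives direct-mode MVs and references for one 4x4 block. In temporal
    // mode the co-located list 0 vector is scaled by the relative picture
    // distances (the tb/td scaling of H.264 temporal direct prediction, so
    // a picture midway between its references gets roughly half the vector);
    // in spatial mode each list takes the minimum reference among the A/B/C
    // neighbours with a median MV, and a list keeps a zero vector when its
    // reference is 0 and the co-located block is effectively motionless.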
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let blk8 = blk4_to_blk8(blk4);
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
                (ZERO_MV, MISSING_POC, MISSING_REF)
            } else if mbi.ref_poc[blk8][0] != MISSING_POC {
                (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
            } else {
                (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
            };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).max(-128).min(127);
            if r0_long || td == 0 {
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).max(-128).min(127);
                let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
                let mv0 = MV {
                        x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                        y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                    };
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
                col_zero = false;
            }
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {
                col_zero = false;
            }
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                            mv_a[ref_l]
                        } else {
                            let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                            if count == 1 {
                                if ref_a[ref_l] == ref_idx {
                                    mv_a[ref_l]
                                } else if ref_b[ref_l] == ref_idx {
                                    mv_b[ref_l]
                                } else {
                                    mv_c[ref_l]
                                }
                            } else {
                                MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                            }
                        };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
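    // The fill_* helpers propagate one value across every 4x4 (or 8x8)
    // block covered by a partition. fill_mvd() stores component-wise
    // absolute values clamped to 128, since that is all get_mv_ctx() needs.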
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mv[ref_l] = mv;
            }
        }
    }
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
            }
        }
    }
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
        } else {
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
                }
            }
        }
    }
}

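// True when the vectors differ by at least one full pel (four quarter-pel
// units) in either component, which is one of the conditions for enabling
// deblocking across a block edge.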
fn mvdiff4(mv1: MV, mv2: MV) -> bool {
    let mv = mv1 - mv2;
    (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
}