h264: rework MB reconstruction and fix loop filtering
nihav-itu/src/codecs/h264/types.rs
use nihav_core::frame::NASimpleVideoFrame;
use nihav_codec_support::codecs::{MV, ZERO_MV};
use nihav_codec_support::data::GenericCache;
use super::FrameRefs;

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    L0,
    L1,
    Bi,
}

#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    Intra4x4,
    Intra8x8,
    Intra16x16(u8, u8, u8),
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}

impl MBType {
    pub fn is_intra(self) -> bool {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM => true,
            _ => false,
        }
    }
    pub fn is_intra16x16(self) -> bool {
        if let MBType::Intra16x16(_, _, _) = self {
            true
        } else {
            false
        }
    }
    pub fn is_skip(self) -> bool {
        match self {
            MBType::PSkip | MBType::BSkip => true,
            _ => false,
        }
    }
    pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
    pub fn is_l0(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L0,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L0
                } else {
                    mode1 == BMode::L0
                }
            },
            MBType::Direct | MBType::BSkip => false,
            _ => true,
        }
    }
    pub fn is_l1(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L1,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L1
                } else {
                    mode1 == BMode::L1
                }
            },
            _ => false,
        }
    }
    pub fn num_parts(self) -> usize {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
            MBType::PSkip |
            MBType::Direct | MBType::BSkip
                => 1,
            MBType::P16x16 |
            MBType::B16x16(_)
                => 1,
            MBType::P16x8 | MBType::P8x16 |
            MBType::B16x8(_, _) | MBType::B8x16(_, _)
                => 2,
            _ => 4,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            MBType::Intra4x4 |
            MBType::Intra8x8 |
            MBType::Intra16x16(_, _, _) |
            MBType::PCM |
            MBType::P16x16 |
            MBType::PSkip |
            MBType::Direct |
            MBType::B16x16(_) |
            MBType::BSkip
                => (16, 16),
            MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
            MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
            _ => (8, 8),
        }
    }
}
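
// Illustrative sanity sketch (an editor addition, not part of the original
// file): shows how the partitioning queries on MBType relate to each other.
#[cfg(test)]
mod mb_type_example {
    use super::*;

    #[test]
    fn partitions_match_sizes() {
        assert_eq!(MBType::P16x8.num_parts(), 2);
        assert_eq!(MBType::P16x8.size(), (16, 8));
        assert_eq!(MBType::P8x8.num_parts(), 4);
        assert!(MBType::P8x8.is_4x4());
        // B partitions report list usage per partition index:
        assert!(MBType::B16x8(BMode::L0, BMode::L1).is_l0(0));
        assert!(MBType::B16x8(BMode::L0, BMode::L1).is_l1(1));
    }
}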

impl Default for MBType {
    fn default() -> Self { MBType::Intra4x4 }
}

#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    Direct8x8,
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}

impl SubMBType {
    pub fn num_parts(self) -> usize {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
            SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
            _ => 2,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
            SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
            SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
            SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
        }
    }
    pub fn is_l0(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
                mode == BMode::L0
            },
            _ => true,
        }
    }
    pub fn is_l1(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
                mode == BMode::L1
            },
            _ => false,
        }
    }
}

impl Default for SubMBType {
    fn default() -> Self { SubMBType::Direct8x8 }
}

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    None,
}

impl CompactMBType {
    pub fn is_intra(self) -> bool {
        match self {
            CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16 => true,
            _ => false,
        }
    }
    pub fn is_intra16orpcm(self) -> bool {
        match self {
            CompactMBType::Intra16x16 | CompactMBType::PCM => true,
            _ => false,
        }
    }
    pub fn is_skip(self) -> bool {
        match self {
            CompactMBType::PSkip | CompactMBType::BSkip => true,
            _ => false,
        }
    }
    pub fn is_direct(self) -> bool {
        match self {
            CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None => true,
            _ => false,
        }
    }
    pub fn is_inter(self) -> bool {
        !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
    }
    pub fn is_16x16(self) -> bool {
        match self {
            CompactMBType::P16x8 | CompactMBType::P8x16 |
            CompactMBType::P8x8 | CompactMBType::P8x8Ref0 |
            CompactMBType::B16x8 | CompactMBType::B8x16 |
            CompactMBType::B8x8 => false,
            _ => true,
        }
    }
}

impl Default for CompactMBType {
    fn default() -> Self { CompactMBType::None }
}

impl From<MBType> for CompactMBType {
    fn from(mbtype: MBType) -> Self {
        match mbtype {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
        }
    }
}
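
// Illustrative sketch (editor addition): CompactMBType strips the payload so
// the macroblock type fits into one byte for the per-row caches used below.
#[cfg(test)]
mod compact_mb_type_example {
    use super::*;

    #[test]
    fn conversion_drops_payload() {
        assert_eq!(CompactMBType::from(MBType::Intra16x16(1, 2, 0)), CompactMBType::Intra16x16);
        assert_eq!(CompactMBType::from(MBType::B16x16(BMode::Bi)), CompactMBType::B16x16);
        // is_16x16() is about partition granularity, not literal block size:
        assert!(CompactMBType::PSkip.is_16x16());
        assert!(!CompactMBType::P16x8.is_16x16());
    }
}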

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}

impl IntraPredMode {
    pub fn is_none(self) -> bool { self == IntraPredMode::None }
    pub fn into_pred_idx(self) -> i8 {
        if !self.is_none() {
            self as u8 as i8
        } else {
            -1
        }
    }
}

impl Default for IntraPredMode {
    fn default() -> Self { IntraPredMode::None }
}

impl From<u8> for IntraPredMode {
    fn from(val: u8) -> Self {
        match val {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
        }
    }
}

impl Into<u8> for IntraPredMode {
    fn into(self) -> u8 {
        match self {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
            _ => 9,
        }
    }
}
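
// Round-trip sketch (editor addition): the u8 values match the H.264 intra
// prediction mode numbering, with 9 and anything above 8 mapping to None.
#[cfg(test)]
mod ipred_mode_example {
    use super::*;

    #[test]
    fn u8_round_trip() {
        assert_eq!(IntraPredMode::from(5), IntraPredMode::VerRight);
        let raw: u8 = IntraPredMode::VerRight.into();
        assert_eq!(raw, 5);
        assert_eq!(IntraPredMode::from(9), IntraPredMode::None);
        assert_eq!(IntraPredMode::None.into_pred_idx(), -1);
    }
}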

pub const MISSING_POC: u16 = 0xFFFF;

#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8
}

pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
const DIRECT_FLAG: u8 = 0x40;

impl PicRef {
    pub fn new(ref_idx: u8) -> Self {
        Self { ref_idx }
    }
    pub fn not_avail(self) -> bool {
        self == MISSING_REF || self == INVALID_REF
    }
    pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
    pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
    pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
    fn min_pos(self, other: Self) -> Self {
        match (self.not_avail(), other.not_avail()) {
            (true, true) => self,
            (false, true) => self,
            (true, false) => other,
            (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
        }
    }
}

impl Default for PicRef {
    fn default() -> Self { MISSING_REF }
}

impl PartialEq for PicRef {
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}

impl std::fmt::Display for PicRef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
            write!(f, "-1")
        } else if *self == INVALID_REF {
            write!(f, "-2")
        } else {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
        }
    }
}
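
// Behaviour sketch (editor addition): PicRef packs a reference index and a
// "predicted by direct mode" flag into one byte; comparisons mask the flag.
#[cfg(test)]
mod pic_ref_example {
    use super::*;

    #[test]
    fn direct_flag_is_transparent() {
        let mut r = PicRef::new(1);
        r.set_direct();
        assert!(r.is_direct());
        assert_eq!(r, PicRef::new(1)); // equality ignores DIRECT_FLAG
        assert_eq!(r.index(), 1);
        assert!(MISSING_REF.not_avail());
        assert_eq!(MISSING_REF.min_pos(ZERO_REF), ZERO_REF);
    }
}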

#[derive(Clone,Copy,Default)]
pub struct MBData {
    pub mb_type: CompactMBType,
    pub cbp: u8,
    pub coded_flags: u32,
    pub cmode: u8,
    pub qp_y: u8,
    pub qp_u: u8,
    pub qp_v: u8,
    pub transform_8x8: bool,
}

pub fn blk4_to_blk8(blk4: usize) -> usize {
    const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
    MAP[blk4 & 0xF]
}
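
// Sketch of the index mapping above (editor addition): blk4 indices are in
// 4x4 raster order, blk8 indices in 2x2 raster order, so the table collapses
// each 2x2 group of 4x4 blocks into its enclosing 8x8 block (x/2 + (y/2)*2).
#[cfg(test)]
mod blk_map_example {
    use super::*;

    #[test]
    fn quadrants() {
        assert_eq!(blk4_to_blk8(0), 0);  // top-left group
        assert_eq!(blk4_to_blk8(3), 1);  // top-right group
        assert_eq!(blk4_to_blk8(12), 2); // bottom-left group
        assert_eq!(blk4_to_blk8(15), 3); // bottom-right group
    }
}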

#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
}

impl Default for Blk8Data {
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}

#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ncoded: u8,
    pub ipred: IntraPredMode,
    pub mv: [MV; 2],
    pub mvd: [MV; 2],
}

pub struct SliceState {
    pub mb_x: usize,
    pub mb_y: usize,
    pub mb_w: usize,
    pub mb_h: usize,
    pub mb_start: usize,

    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

    pub deblock: [u8; 16],

    pub has_top: bool,
    pub has_left: bool,

    pub top_line_y: Vec<u8>,
    pub left_y: [u8; 17], // first element is top-left
    pub top_line_c: [Vec<u8>; 2],
    pub left_c: [[u8; 9]; 2],
}

impl SliceState {
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: [0; 16],

            has_top: false,
            has_left: false,

            top_line_y: Vec::new(),
            left_y: [0; 17],
            top_line_c: [Vec::new(), Vec::new()],
            left_c: [[0; 9]; 2],
        }
    }
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_w = mb_w;
        self.mb_h = mb_h;
        self.mb_start = mb_pos;
        if mb_w > 0 {
            self.mb_x = mb_pos % mb_w;
            self.mb_y = mb_pos / mb_w;
        } else {
            self.mb_x = 0;
            self.mb_y = 0;
        }
        self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());

        self.has_top = false;
        self.has_left = false;

        self.top_line_y.resize(mb_w * 16 + 1, 0x80);
        self.top_line_c[0].resize(mb_w * 8 + 1, 0x80);
        self.top_line_c[1].resize(mb_w * 8 + 1, 0x80);
        self.left_y = [0x80; 17];
        self.left_c = [[0x80; 9]; 2];
    }
    pub fn save_ipred_context(&mut self, frm: &NASimpleVideoFrame<u8>) {
        let dstoff = self.mb_x * 16;
        let srcoff = frm.offset[0] + self.mb_x * 16 + self.mb_y * 16 * frm.stride[0];
        self.left_y[0] = self.top_line_y[dstoff + 15];
        self.top_line_y[dstoff..][..16].copy_from_slice(&frm.data[srcoff + frm.stride[0] * 15..][..16]);
        for (dst, src) in self.left_y[1..].iter_mut().zip(frm.data[srcoff..].chunks(frm.stride[0])) {
            *dst = src[15];
        }
        for chroma in 0..2 {
            let cstride = frm.stride[chroma + 1];
            let dstoff = self.mb_x * 8;
            let srcoff = frm.offset[chroma + 1] + self.mb_x * 8 + self.mb_y * 8 * cstride;
            self.left_c[chroma][0] = self.top_line_c[chroma][dstoff + 7];
            self.top_line_c[chroma][dstoff..][..8].copy_from_slice(&frm.data[srcoff + cstride * 7..][..8]);
            for (dst, src) in self.left_c[chroma][1..].iter_mut().zip(frm.data[srcoff..].chunks(cstride)) {
                *dst = src[7];
            }
        }
    }
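    // Editor's note on save_ipred_context() above: it snapshots the bottom
    // row and rightmost column of the just-reconstructed macroblock so that
    // the intra predictors of the following macroblocks can read their
    // top/left neighbours from these small caches instead of the frame
    // buffer; left_y[0] and left_c[..][0] hold the top-left corner sample.
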
    pub fn fill_deblock(&mut self, frefs: &FrameRefs, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
            return;
        }

        self.deblock = [0; 16];

        let tx8x8 = self.get_cur_mb().transform_8x8;

        let cur_mbt = self.get_cur_mb().mb_type;
        let left_mbt = self.get_left_mb().mb_type;
        let mut top_mbt = self.get_top_mb().mb_type;
        for y in 0..4 {
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top && (!tx8x8 || (y & 1) == 0) {
                if is_s || cur_mbt.is_intra() || top_mbt.is_intra() {
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock[y * 4..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock[y * 4 + x] |= 0x20;
                        } else {
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || !frefs.cmp_refs(cur_ref, top_ref) {
                                self.deblock[y * 4 + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            let mut lleft_mbt = left_mbt;
            for x in 0..4 {
                let skip_8 = tx8x8 && (x & 1) != 0;
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if !can_do_left {
                    continue;
                }
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                if skip_8 {
                    // inner vertical edges are not filtered with the 8x8 transform
                } else if is_s || cur_mbt.is_intra() || lleft_mbt.is_intra() {
                    self.deblock[y * 4 + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock[y * 4 + x] |= 2;
                } else {
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || !frefs.cmp_refs(cur_ref, left_ref) {
                        self.deblock[y * 4 + x] |= 1;
                    }
                }
                lleft_mbt = cur_mbt;
            }
            top_mbt = cur_mbt;
        }
    }
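    // Editor's note on the encoding used by fill_deblock() above: deblock[]
    // holds one byte per 4x4 block; the high nibble is the filter strength
    // for the block's top edge and the low nibble for its left edge. The
    // values follow the H.264 boundary-strength ladder: 4 on intra macroblock
    // edges, 3 on intra inner edges, 2 when either side has coded residual,
    // 1 on a motion-vector or reference mismatch, 0 for no filtering. With
    // the 8x8 transform only every other edge is marked, and deblock_mode 1
    // (filtering disabled for the slice) skips the computation entirely.
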
    pub fn next_mb(&mut self) {
        self.mb_x += 1;
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb_x = 0;
            self.mb_y += 1;
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();

            self.has_left = false;
        }
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    }
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }

    pub fn apply_to_blk8<F: (Fn(&mut Blk8Data))>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    pub fn apply_to_blk4<F: (Fn(&mut Blk4Data))>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }

    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }

    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        // yoff is a multiple of four, so it already equals (yoff / 4) * 4,
        // the offset of the corresponding blk4 row
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
        (ctx0, ctx1)
    }
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let mut ctx = 0;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
            ctx += 1;
        }
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
            ctx += 2;
        }
        ctx
    }
    #[allow(clippy::if_same_then_else)]
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
                mv_b
            } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
                mv_a
            } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
                mv_a
            } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
                mv_c
            } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
                mv_a
            } else {
                let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if count == 1 {
                    if ref_a == ref_idx {
                        mv_a
                    } else if ref_b == ref_idx {
                        mv_b
                    } else {
                        mv_c
                    }
                } else {
                    MV::pred(mv_a, mv_b, mv_c)
                }
            };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
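    // Editor's note: predict() above implements the median motion-vector
    // prediction of H.264 (clause 8.4.1.3): the predictor is derived from
    // neighbours A (left), B (top) and C (top-right, falling back to the
    // top-left neighbour when C is unavailable or not yet decoded). 16x8 and
    // 8x16 partitions use the directional shortcuts when the corresponding
    // neighbour shares the reference index; if exactly one neighbour matches
    // the reference, its vector is used directly, otherwise the result is
    // the component-wise median of A, B and C.
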
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
                ZERO_MV
            } else {
                let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if count == 1 {
                    if ref_a == ref_idx {
                        mv_a
                    } else if ref_b == ref_idx {
                        mv_b
                    } else {
                        mv_c
                    }
                } else {
                    MV::pred(mv_a, mv_b, mv_c)
                }
            };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
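    // Editor's note: the P-skip prediction above follows clause 8.4.1.1; the
    // motion vector collapses to zero when a neighbour is missing or when A
    // or B is a zero vector with reference index 0, and otherwise the usual
    // median prediction over A/B/C applies, always against reference 0.
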
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16) {
        let (col_mb, _, _) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if col_mb.mb_type.is_16x16() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) {
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let blk8 = blk4_to_blk8(blk4);
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
                (ZERO_MV, MISSING_POC, MISSING_REF)
            } else if mbi.ref_poc[blk8][0] != MISSING_POC {
                (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
            } else {
                (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
            };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).max(-128).min(127);
            if r0_long || td == 0 {
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).max(-128).min(127);
                let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
                let mv0 = MV {
                        x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                        y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                    };
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
                col_zero = false;
            }
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {
                col_zero = false;
            }
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                            mv_a[ref_l]
                        } else {
                            let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                            if count == 1 {
                                if ref_a[ref_l] == ref_idx {
                                    mv_a[ref_l]
                                } else if ref_b[ref_l] == ref_idx {
                                    mv_b[ref_l]
                                } else {
                                    mv_c[ref_l]
                                }
                            } else {
                                MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                            }
                        };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
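    // Editor's note: the temporal branch above is the scaling of clause
    // 8.4.1.2.3. With td the POC distance between the two reference pictures
    // and tb the distance from the current picture, tx = (16384 + |td/2|)/td
    // approximates 16384/td, so scale is roughly 256*tb/td in 1/256 units.
    // For example, td = 2 and tb = 1 give tx = 8192 and scale = 128, hence
    // mv0 = col_mv/2 and mv1 = mv0 - col_mv = -col_mv/2. The spatial branch
    // picks one reference per list as the minimum over neighbours A/B/C and
    // zeroes the vectors of blocks whose co-located block barely moved.
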
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mv[ref_l] = mv;
            }
        }
    }
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
            }
        }
    }
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
        } else {
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
                }
            }
        }
    }
}

fn mvdiff4(mv1: MV, mv2: MV) -> bool {
    let mv = mv1 - mv2;
    (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
}
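
// Editor's note: mvdiff4() flags motion vectors whose components differ by 4
// or more, i.e. at least one full luma sample in quarter-pel units; this is
// the condition that raises boundary strength 1 in fill_deblock() above.
#[cfg(test)]
mod mvdiff_example {
    use super::*;

    #[test]
    fn full_pel_threshold() {
        let a = MV { x: 0, y: 0 };
        assert!(!mvdiff4(a, MV { x: 3, y: -3 })); // still below one full pel
        assert!(mvdiff4(a, MV { x: 4, y: 0 }));   // one full pel apart
    }
}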