// nihav-itu/src/codecs/h264/types.rs
use nihav_core::frame::NASimpleVideoFrame;
use nihav_codec_support::codecs::{MV, ZERO_MV};
use nihav_codec_support::data::GenericCache;
use super::FrameRefs;
use super::pic_ref::FrameMBInfo;

/// Prediction direction of a B-slice macroblock partition.
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    L0,
    L1,
    Bi,
}

/// Macroblock type as decoded from the bitstream.
///
/// `Intra16x16` carries the prediction mode together with the chroma and
/// luma coded block pattern information encoded in the macroblock type.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    Intra4x4,
    Intra8x8,
    Intra16x16(u8, u8, u8),
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}

impl MBType {
    pub fn is_intra(self) -> bool {
        matches!(self, MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM)
    }
    pub fn is_intra16x16(self) -> bool {
        matches!(self, MBType::Intra16x16(_, _, _))
    }
    pub fn is_skip(self) -> bool {
        matches!(self, MBType::PSkip | MBType::BSkip)
    }
    pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
    pub fn is_l0(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L0,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L0
                } else {
                    mode1 == BMode::L0
                }
            },
            MBType::Direct | MBType::BSkip => false,
            _ => true,
        }
    }
    pub fn is_l1(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L1,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L1
                } else {
                    mode1 == BMode::L1
                }
            },
            _ => false,
        }
    }
    pub fn num_parts(self) -> usize {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
            MBType::PSkip |
            MBType::Direct | MBType::BSkip |
            MBType::P16x16 |
            MBType::B16x16(_)
                => 1,
            MBType::P16x8 | MBType::P8x16 |
            MBType::B16x8(_, _) | MBType::B8x16(_, _)
                => 2,
            _ => 4,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            MBType::Intra4x4 |
            MBType::Intra8x8 |
            MBType::Intra16x16(_, _, _) |
            MBType::PCM |
            MBType::P16x16 |
            MBType::PSkip |
            MBType::Direct |
            MBType::B16x16(_) |
            MBType::BSkip
                => (16, 16),
            MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
            MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
            _ => (8, 8),
        }
    }
}

impl Default for MBType {
    fn default() -> Self { MBType::Intra4x4 }
}

/// Partitioning of one 8x8 sub-macroblock in P and B macroblocks.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    Direct8x8,
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}

impl SubMBType {
    pub fn num_parts(self) -> usize {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
            SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
            _ => 2,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
            SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
            SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
            SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
        }
    }
    pub fn is_l0(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => mode == BMode::L0,
            _ => true,
        }
    }
    pub fn is_l1(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => mode == BMode::L1,
            _ => false,
        }
    }
}

impl Default for SubMBType {
    fn default() -> Self { SubMBType::Direct8x8 }
}

/// Compact macroblock type without partition modes, suitable for the
/// per-macroblock history kept during slice decoding.
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    None,
}

impl CompactMBType {
    pub fn is_intra(self) -> bool {
        matches!(self, CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16)
    }
    pub fn is_intra16orpcm(self) -> bool {
        matches!(self, CompactMBType::Intra16x16 | CompactMBType::PCM)
    }
    pub fn is_skip(self) -> bool {
        matches!(self, CompactMBType::PSkip | CompactMBType::BSkip)
    }
    pub fn is_direct(self) -> bool {
        matches!(self, CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None)
    }
    pub fn is_inter(self) -> bool {
        !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
    }
    pub fn is_16x16_ref(self) -> bool {
        matches!(self,
            CompactMBType::Intra4x4 |
            CompactMBType::Intra8x8 |
            CompactMBType::Intra16x16 |
            CompactMBType::PCM |
            CompactMBType::P16x16 |
            CompactMBType::B16x16)
    }
}

impl Default for CompactMBType {
    fn default() -> Self { CompactMBType::None }
}

impl From<MBType> for CompactMBType {
    fn from(mbtype: MBType) -> Self {
        match mbtype {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
        }
    }
}

/// Intra prediction mode for 4x4 and 8x8 blocks, in the order defined by
/// the H.264 specification.
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}

impl IntraPredMode {
    pub fn is_none(self) -> bool { self == IntraPredMode::None }
    /// Returns the mode number used for prediction, or -1 when unset.
    pub fn into_pred_idx(self) -> i8 {
        if !self.is_none() {
            self as u8 as i8
        } else {
            -1
        }
    }
}

impl Default for IntraPredMode {
    fn default() -> Self { IntraPredMode::None }
}

impl From<u8> for IntraPredMode {
    fn from(val: u8) -> Self {
        match val {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
        }
    }
}

impl From<IntraPredMode> for u8 {
    fn from(val: IntraPredMode) -> Self {
        match val {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
            _ => 9,
        }
    }
}

/// Picture order count value used for missing references.
pub const MISSING_POC: u16 = 0xFFFF;

/// Reference picture index with an embedded "direct prediction" flag.
#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8
}

pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
const DIRECT_FLAG: u8 = 0x40;
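// A sketch of how the methods below interpret `ref_idx` (an illustration
// derived from the code, not an authoritative format description):
//   0xFF      - MISSING_REF: the neighbour or reference is not available
//   0xFE      - INVALID_REF: the value has not been set yet
//   bit 6     - DIRECT_FLAG: the reference was obtained by direct prediction;
//               it is masked out by index() and ignored by PartialEq
//   low bits  - index into the corresponding reference picture list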

impl PicRef {
    pub fn new(ref_idx: u8) -> Self {
        Self { ref_idx }
    }
    pub fn not_avail(self) -> bool {
        self == MISSING_REF || self == INVALID_REF
    }
    pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
    pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
    pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
    /// Returns whichever of the two references has the lower index,
    /// preferring an available one.
    fn min_pos(self, other: Self) -> Self {
        match (self.not_avail(), other.not_avail()) {
            (_, true) => self,
            (true, false) => other,
            (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
        }
    }
}

impl Default for PicRef {
    fn default() -> Self { MISSING_REF }
}

impl PartialEq for PicRef {
    // equality compares only the index part, ignoring DIRECT_FLAG
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}

impl std::fmt::Display for PicRef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
            write!(f, "-1")
        } else if *self == INVALID_REF {
            write!(f, "-2")
        } else {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
        }
    }
}

/// Per-macroblock data stored for the whole slice.
#[derive(Clone,Copy,Default)]
pub struct MBData {
    pub mb_type: CompactMBType,
    pub cbp: u8,
    pub coded_flags: u32,
    pub cmode: u8,
    pub qp_y: u8,
    pub qp_u: u8,
    pub qp_v: u8,
    pub transform_8x8: bool,
}

/// Maps a 4x4 luma block index (raster order, 0..15) to the index of the
/// 8x8 block (0..3) containing it.
pub fn blk4_to_blk8(blk4: usize) -> usize {
    const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
    MAP[blk4 & 0xF]
}
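// The mapping above, drawn out (comment added for illustration):
//
//   4x4 blocks (raster order)    8x8 blocks
//     0  1 |  2  3                 0 | 1
//     4  5 |  6  7                ---+---
//    ------+-------                2 | 3
//     8  9 | 10 11
//    12 13 | 14 15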

/// Per-8x8-block data: reference indices and chroma coefficient counts.
#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
}

impl Default for Blk8Data {
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}

/// Per-4x4-block data: coefficient count, intra prediction mode and motion
/// information for both reference lists.
#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ncoded: u8,
    pub ipred: IntraPredMode,
    pub mv: [MV; 2],
    pub mvd: [MV; 2],
}

/// Decoder state for the currently decoded slice.
pub struct SliceState {
    pub mb_x: usize,
    pub mb_y: usize,
    pub mb_w: usize,
    pub mb_h: usize,
    pub mb_start: usize,

    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

    // per-4x4 deblocking strength: top edge in the high nibble,
    // left edge in the low nibble (see fill_deblock())
    pub deblock: [u8; 16],

    pub has_top: bool,
    pub has_left: bool,

    pub top_line_y: Vec<u8>,
    pub left_y: [u8; 17], // first element is top-left
    pub top_line_c: [Vec<u8>; 2],
    pub left_c: [[u8; 9]; 2],
}
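// A note on the caches above (an interpretation of the code in reset()):
// mb, blk8 and blk4 keep a sliding window of macroblock rows at 1, 2 and 4
// entries per macroblock respectively, with extra entries on the sides so
// that the "left" and "top" lookups below never go out of bounds.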

// for direct 8x8 inference: each 4x4 block is mapped to the nearest
// macroblock-corner 4x4 block (0, 3, 12 or 15)
const BLK4_TO_D8: [usize; 16] = [ 0, 0, 3, 3, 0, 0, 3, 3, 12, 12, 15, 15, 12, 12, 15, 15 ];

impl SliceState {
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: [0; 16],

            has_top: false,
            has_left: false,

            top_line_y: Vec::new(),
            left_y: [0; 17],
            top_line_c: [Vec::new(), Vec::new()],
            left_c: [[0; 9]; 2],
        }
    }
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_w = mb_w;
        self.mb_h = mb_h;
        self.mb_start = mb_pos;
        if mb_w > 0 {
            self.mb_x = mb_pos % mb_w;
            self.mb_y = mb_pos / mb_w;
        } else {
            self.mb_x = 0;
            self.mb_y = 0;
        }
        self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());

        self.has_top = false;
        self.has_left = false;

        // unavailable intra prediction context is filled with neutral grey
        self.top_line_y.resize(mb_w * 16 + 1, 0x80);
        self.top_line_c[0].resize(mb_w * 8 + 1, 0x80);
        self.top_line_c[1].resize(mb_w * 8 + 1, 0x80);
        self.left_y = [0x80; 17];
        self.left_c = [[0x80; 9]; 2];
    }
    /// Saves the bottom row and right column of the reconstructed macroblock
    /// as intra prediction context for the following macroblocks.
    pub fn save_ipred_context(&mut self, frm: &NASimpleVideoFrame<u8>) {
        let dstoff = self.mb_x * 16;
        let srcoff = frm.offset[0] + self.mb_x * 16 + self.mb_y * 16 * frm.stride[0];
        self.left_y[0] = self.top_line_y[dstoff + 15];
        self.top_line_y[dstoff..][..16].copy_from_slice(&frm.data[srcoff + frm.stride[0] * 15..][..16]);
        for (dst, src) in self.left_y[1..].iter_mut().zip(frm.data[srcoff..].chunks(frm.stride[0])) {
            *dst = src[15];
        }
        for chroma in 0..2 {
            let cstride = frm.stride[chroma + 1];
            let dstoff = self.mb_x * 8;
            let srcoff = frm.offset[chroma + 1] + self.mb_x * 8 + self.mb_y * 8 * cstride;
            self.left_c[chroma][0] = self.top_line_c[chroma][dstoff + 7];
            self.top_line_c[chroma][dstoff..][..8].copy_from_slice(&frm.data[srcoff + cstride * 7..][..8]);
            for (dst, src) in self.left_c[chroma][1..].iter_mut().zip(frm.data[srcoff..].chunks(cstride)) {
                *dst = src[7];
            }
        }
    }
    /// Computes deblocking filter strengths for the current macroblock,
    /// storing the top edge strength in the high nibble and the left edge
    /// strength in the low nibble of each `deblock` entry.
    pub fn fill_deblock(&mut self, frefs: &FrameRefs, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 { // deblocking disabled for this slice
            return;
        }

        self.deblock = [0; 16];

        let tx8x8 = self.get_cur_mb().transform_8x8;

        let cur_intra = self.get_cur_mb().mb_type.is_intra();
        let left_intra = self.get_left_mb().mb_type.is_intra();
        let mut top_intra = self.get_top_mb().mb_type.is_intra();
        for y in 0..4 {
            // deblock_mode == 2 means the filter may not cross slice edges
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top && (!tx8x8 || (y & 1) == 0) {
                if is_s || cur_intra || top_intra {
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock[y * 4..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock[y * 4 + x] |= 0x20;
                        } else {
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || !frefs.cmp_refs(cur_ref, top_ref) {
                                self.deblock[y * 4 + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            let mut lleft_intra = left_intra;
            for x in 0..4 {
                let skip_8 = tx8x8 && (x & 1) != 0;
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if !can_do_left {
                    continue;
                }
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                if skip_8 {
                    // inner vertical edges are not filtered with the 8x8 transform
                } else if is_s || cur_intra || lleft_intra {
                    self.deblock[y * 4 + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock[y * 4 + x] |= 2;
                } else {
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || !frefs.cmp_refs(cur_ref, left_ref) {
                        self.deblock[y * 4 + x] |= 1;
                    }
                }
                lleft_intra = cur_intra;
            }
            top_intra = cur_intra;
        }
    }
    pub fn next_mb(&mut self) {
        self.mb_x += 1;
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb_x = 0;
            self.mb_y += 1;
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();

            self.has_left = false;
        }
        // the top macroblock is available only if it belongs to the same slice
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    }
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }

    /// Applies `f` to all four 8x8 block entries of the current macroblock.
    pub fn apply_to_blk8<F: Fn(&mut Blk8Data)>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    /// Applies `f` to all sixteen 4x4 block entries of the current macroblock.
    pub fn apply_to_blk4<F: Fn(&mut Blk4Data)>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }

    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }

    /// Derives the CABAC contexts for motion vector difference coding from
    /// the neighbouring MVD magnitudes.
    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        // yoff is a multiple of four, so this equals xoff / 4 + (yoff / 4) * 4
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
        (ctx0, ctx1)
    }
    /// Derives the CABAC context for reference index coding from the
    /// neighbouring reference indices.
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let mut ctx = 0;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
            ctx += 1;
        }
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
            ctx += 2;
        }
        ctx
    }
    /// Median motion vector prediction (H.264 8.4.1.3) for the given
    /// partition, using neighbours A (left), B (top) and C (top-right,
    /// falling back to top-left D when C is unavailable).
    #[allow(clippy::if_same_then_else)]
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        // 16x8 and 8x16 partitions have directional special cases
        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
            mv_b
        } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
            mv_c
        } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
            mv_a
        } else {
            // if exactly one neighbour uses the same reference copy its MV,
            // otherwise take the component-wise median
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
    /// Motion vector prediction for P_Skip macroblocks: as for a 16x16
    /// partition, but forced to zero when the neighbours suggest a static
    /// area.
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
            ZERO_MV
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
    /// Fills motion information for a macroblock coded in direct mode.
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, direct_8x8: bool, cur_id: u16) {
        let (col_mb, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if direct_8x8 {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &col_mb, r1_poc, r1_long, temporal_mv, cur_id, BLK4_TO_D8[blk4]);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        } else if col_mb.mb_type.is_16x16_ref() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &col_mb, r1_poc, r1_long, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &col_mb, r1_poc, r1_long, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
    /// Fills motion information for a single 4x4 block coded in direct mode.
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, direct8x8: bool, cur_id: u16, blk4: usize) {
        let src_blk = if !direct8x8 { blk4 } else { BLK4_TO_D8[blk4] };
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, &mbi, r1_poc, r1_long, temporal_mv, cur_id, src_blk);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
    /// Derives (MV, reference) pairs for both lists of a direct-coded block,
    /// using either temporal or spatial direct prediction.
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, mbi: &FrameMBInfo, r1_poc: u16, r1_long: bool, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let blk8 = blk4_to_blk8(blk4);
        // take the co-located motion from list 0 if present, list 1 otherwise
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
            (ZERO_MV, MISSING_POC, MISSING_REF)
        } else if mbi.ref_poc[blk8][0] != MISSING_POC {
            (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
        } else {
            (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
        };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).clamp(-128, 127);
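            // Temporal direct prediction scales the co-located MV by the
            // ratio of the POC distances. A worked example with illustrative
            // numbers: r0_poc = 0, r1_poc = 8, cur_id = 4 and col_mv.x = 10
            // give td = 8, tx = (16384 + 4) / 8 = 2048, tb = 4,
            // scale = (4 * 2048 + 32) >> 6 = 128, so
            // mv0.x = (10 * 128 + 128) >> 8 = 5 and mv1.x = 5 - 10 = -5:
            // half the motion forward, half backward.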
            if r0_long || td == 0 {
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).clamp(-128, 127);
                let scale = ((tb * tx + 32) >> 6).clamp(-1024, 1023);
                let mv0 = MV {
                    x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                    y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                };
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            // spatial direct mode
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            // for each list pick the lowest reference index used by the neighbours
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            // a (near-)static co-located block with a short-term zero
            // reference forces zero MVs below
            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
                col_zero = false;
            }
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {
                col_zero = false;
            }
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                        mv_a[ref_l]
                    } else {
                        let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                        if count == 1 {
                            if ref_a[ref_l] == ref_idx {
                                mv_a[ref_l]
                            } else if ref_b[ref_l] == ref_idx {
                                mv_b[ref_l]
                            } else {
                                mv_c[ref_l]
                            }
                        } else {
                            MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                        }
                    };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mv[ref_l] = mv;
            }
        }
    }
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        // only clamped absolute values are kept; they are used for CABAC
        // context derivation in get_mv_ctx()
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
            }
        }
    }
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
        } else {
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
                }
            }
        }
    }
}

/// Reports whether two motion vectors differ by one full pixel (four
/// quarter-pel units) or more in either component, the threshold used in
/// deblocking strength decisions.
fn mvdiff4(mv1: MV, mv2: MV) -> bool {
    let mv = mv1 - mv2;
    (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
}
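
#[cfg(test)]
mod tests {
    // A minimal sketch of unit tests for the pure helpers above; the
    // expected values follow directly from the tables and bit layout in
    // this file.
    use super::*;

    #[test]
    fn blk4_to_blk8_mapping() {
        // blocks 0, 1, 4 and 5 form the top-left 8x8 block and so on
        assert_eq!(blk4_to_blk8(5), 0);
        assert_eq!(blk4_to_blk8(6), 1);
        assert_eq!(blk4_to_blk8(9), 2);
        assert_eq!(blk4_to_blk8(15), 3);
    }

    #[test]
    fn pic_ref_direct_flag() {
        let mut pref = PicRef::new(2);
        pref.set_direct();
        // the direct flag changes neither the index nor equality
        assert!(pref.is_direct());
        assert_eq!(pref.index(), 2);
        assert_eq!(pref, PicRef::new(2));
    }

    #[test]
    fn intra_pred_mode_roundtrip() {
        for val in 0..9 {
            assert_eq!(u8::from(IntraPredMode::from(val)), val);
        }
    }

    #[test]
    fn mb_type_partitions() {
        assert_eq!(MBType::P16x8.num_parts(), 2);
        assert_eq!(MBType::P16x8.size(), (16, 8));
        assert!(MBType::B8x16(BMode::L0, BMode::L1).is_l1(1));
        assert!(MBType::PCM.is_intra());
    }

    #[test]
    fn mvdiff4_threshold() {
        // differences under one full pixel do not trigger deblocking
        assert!(!mvdiff4(ZERO_MV, MV { x: 3, y: 3 }));
        assert!(mvdiff4(ZERO_MV, MV { x: 4, y: 0 }));
    }
}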