mostly working ITU H.264 decoder
nihav.git: nihav-itu/src/codecs/h264/types.rs
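//! Macroblock type definitions and per-slice bookkeeping for the H.264 decoder.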
use nihav_codec_support::codecs::{MV, ZERO_MV};
use nihav_codec_support::data::GenericCache;
use super::FrameRefs;

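/// Prediction direction of a B-slice partition: list 0, list 1 or bidirectional.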
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    L0,
    L1,
    Bi,
}

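/// Macroblock type as parsed from the slice data. The `Intra16x16` payload
/// carries the 16x16 prediction mode together with the chroma and luma
/// coded block patterns that are folded into its `mb_type` codeword.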
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    Intra4x4,
    Intra8x8,
    Intra16x16(u8, u8, u8),
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}

impl MBType {
    pub fn is_intra(self) -> bool {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM => true,
            _ => false,
        }
    }
    pub fn is_intra16x16(self) -> bool {
        if let MBType::Intra16x16(_, _, _) = self {
            true
        } else {
            false
        }
    }
    pub fn is_skip(self) -> bool {
        match self {
            MBType::PSkip | MBType::BSkip => true,
            _ => false,
        }
    }
    pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
    pub fn is_l0(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L0,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L0
                } else {
                    mode1 == BMode::L0
                }
            },
            MBType::Direct | MBType::BSkip => false,
            _ => true,
        }
    }
    pub fn is_l1(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L1,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 {
                    mode0 == BMode::L1
                } else {
                    mode1 == BMode::L1
                }
            },
            _ => false,
        }
    }
    pub fn num_parts(self) -> usize {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
            MBType::PSkip |
            MBType::Direct | MBType::BSkip |
            MBType::P16x16 |
            MBType::B16x16(_)
                => 1,
            MBType::P16x8 | MBType::P8x16 |
            MBType::B16x8(_, _) | MBType::B8x16(_, _)
                => 2,
            _ => 4,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            MBType::Intra4x4 |
            MBType::Intra8x8 |
            MBType::Intra16x16(_, _, _) |
            MBType::PCM |
            MBType::P16x16 |
            MBType::PSkip |
            MBType::Direct |
            MBType::B16x16(_) |
            MBType::BSkip
                => (16, 16),
            MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
            MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
            _ => (8, 8),
        }
    }
}

impl Default for MBType {
    fn default() -> Self { MBType::Intra4x4 }
}

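/// Sub-macroblock partition type for P_8x8 and B_8x8 macroblocks.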
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    Direct8x8,
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}

impl SubMBType {
    pub fn num_parts(self) -> usize {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
            SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
            _ => 2,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
            SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
            SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
            SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
        }
    }
    pub fn is_l0(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => mode == BMode::L0,
            _ => true,
        }
    }
    pub fn is_l1(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => mode == BMode::L1,
            _ => false,
        }
    }
}

impl Default for SubMBType {
    fn default() -> Self { SubMBType::Direct8x8 }
}

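/// Macroblock type stripped of its payload, compact enough for the
/// neighbour caches (see `MBData`).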
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    None,
}

impl CompactMBType {
    pub fn is_intra(self) -> bool {
        match self {
            CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16 => true,
            _ => false,
        }
    }
    pub fn is_intra16orpcm(self) -> bool {
        match self {
            CompactMBType::Intra16x16 | CompactMBType::PCM => true,
            _ => false,
        }
    }
    pub fn is_skip(self) -> bool {
        match self {
            CompactMBType::PSkip | CompactMBType::BSkip => true,
            _ => false,
        }
    }
    pub fn is_direct(self) -> bool {
        match self {
            CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None => true,
            _ => false,
        }
    }
    pub fn is_inter(self) -> bool {
        !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
    }
    pub fn is_16x16(self) -> bool {
        match self {
            CompactMBType::P16x8 | CompactMBType::P8x16 |
            CompactMBType::P8x8 | CompactMBType::P8x8Ref0 |
            CompactMBType::B16x8 | CompactMBType::B8x16 |
            CompactMBType::B8x8 => false,
            _ => true,
        }
    }
}

impl Default for CompactMBType {
    fn default() -> Self { CompactMBType::None }
}

impl From<MBType> for CompactMBType {
    fn from(mbtype: MBType) -> Self {
        match mbtype {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
        }
    }
}

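/// Intra prediction mode, numbered as in the bitstream (H.264 Table 8-2);
/// `None` marks blocks with no prediction mode available.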
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}

impl IntraPredMode {
    pub fn is_none(self) -> bool { self == IntraPredMode::None }
    pub fn into_pred_idx(self) -> i8 {
        if !self.is_none() {
            self as u8 as i8
        } else {
            -1
        }
    }
}

impl Default for IntraPredMode {
    fn default() -> Self { IntraPredMode::None }
}

impl From<u8> for IntraPredMode {
    fn from(val: u8) -> Self {
        match val {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
        }
    }
}

impl From<IntraPredMode> for u8 {
    fn from(val: IntraPredMode) -> Self {
        match val {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
            _ => 9,
        }
    }
}

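/// Picture order count value for a missing (unavailable) reference.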
pub const MISSING_POC: u16 = 0xFFFF;

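/// Reference picture index packed into a single byte. Bit 6 (`DIRECT_FLAG`)
/// marks an index inherited through direct prediction; `0xFF` and `0xFE` are
/// the "missing" and "invalid" sentinels. Equality and `index()` ignore the
/// direct flag.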
#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8
}

pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
const DIRECT_FLAG: u8 = 0x40;

impl PicRef {
    pub fn new(ref_idx: u8) -> Self {
        Self { ref_idx }
    }
    pub fn not_avail(self) -> bool {
        self == MISSING_REF || self == INVALID_REF
    }
    pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
    pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
    pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
    fn min_pos(self, other: Self) -> Self {
        match (self.not_avail(), other.not_avail()) {
            (true, true) | (false, true) => self,
            (true, false) => other,
            (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
        }
    }
}

impl Default for PicRef {
    fn default() -> Self { MISSING_REF }
}

impl PartialEq for PicRef {
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}

impl std::fmt::Display for PicRef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
            write!(f, "-1")
        } else if *self == INVALID_REF {
            write!(f, "-2")
        } else {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
        }
    }
}

#[derive(Clone,Copy,Default)]
pub struct MBData {
    pub mb_type: CompactMBType,
    pub cbp: u8,
    pub coded_flags: u32,
    pub cmode: u8,
    pub qp_y: u8,
    pub qp_u: u8,
    pub qp_v: u8,
    pub transform_8x8: bool,
}

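/// Maps a 4x4 block index (raster order inside a macroblock) to the index
/// of the 8x8 block that contains it.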
pub fn blk4_to_blk8(blk4: usize) -> usize {
    const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
    MAP[blk4 & 0xF]
}

#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
}

impl Default for Blk8Data {
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}

#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ncoded: u8,
    pub ipred: IntraPredMode,
    pub mv: [MV; 2],
    pub mvd: [MV; 2],
}

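/// Per-slice decoding state: the current macroblock position and sliding
/// row caches of macroblock, 8x8-block and 4x4-block data, used for MV and
/// intra prediction, CABAC context derivation and deblocking strengths.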
pub struct SliceState {
    pub mb_x: usize,
    pub mb_y: usize,
    pub mb_w: usize,
    pub mb_h: usize,
    pub mb_start: usize,

    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

    pub deblock: GenericCache<u8>,

    pub has_top: bool,
    pub has_left: bool,
}

impl SliceState {
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: GenericCache::new(0, 0, 0),

            has_top: false,
            has_left: false,
        }
    }
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_w = mb_w;
        self.mb_h = mb_h;
        self.mb_start = mb_pos;
        if mb_w > 0 {
            self.mb_x = mb_pos % mb_w;
            self.mb_y = mb_pos / mb_w;
        } else {
            self.mb_x = 0;
            self.mb_y = 0;
        }
        self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());

        self.deblock = GenericCache::new(4, mb_w * 4 + 1, 0);

        self.has_top = false;
        self.has_left = false;
    }
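    /// Fills deblocking filter strength values for the current macroblock
    /// (H.264 clause 8.7). For every 4x4 block the high nibble holds the
    /// top-edge strength and the low nibble the left-edge strength:
    /// strongest for intra macroblock boundaries, then inner intra edges,
    /// then edges with coded residuals, then edges where the motion vectors
    /// differ by a full pel or more or the references differ.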
    pub fn fill_deblock(&mut self, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
            return;
        }

        let tx8x8 = self.get_cur_mb().transform_8x8;

        let mut idx = self.deblock.xpos + self.mb_x * 4;
        let cur_mbt = self.get_cur_mb().mb_type;
        let left_mbt = self.get_left_mb().mb_type;
        let mut top_mbt = self.get_top_mb().mb_type;
        for y in 0..4 {
            if tx8x8 && (y & 1) != 0 {
                continue;
            }
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top {
                if is_s || cur_mbt.is_intra() || top_mbt.is_intra() {
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock.data[idx..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock.data[idx + x] |= 0x20;
                        } else {
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || cur_ref != top_ref {
                                self.deblock.data[idx + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            let mut lleft_mbt = left_mbt;
            for x in 0..4 {
                if tx8x8 && (x & 1) != 0 {
                    continue;
                }
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if can_do_left {
                    let blk4 = x + y * 4;
                    let blk8 = x / 2 + (y / 2) * 2;
                    if is_s || cur_mbt.is_intra() || lleft_mbt.is_intra() {
                        self.deblock.data[idx + x] |= if x == 0 { 4 } else { 3 };
                    } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                        self.deblock.data[idx + x] |= 2;
                    } else {
                        let cur_mv = self.get_cur_blk4(blk4).mv;
                        let left_mv = self.get_left_blk4(blk4).mv;
                        let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                        let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                        if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || cur_ref != left_ref {
                            self.deblock.data[idx + x] |= 1;
                        }
                    }
                }
                lleft_mbt = cur_mbt;
            }
            top_mbt = cur_mbt;
            idx += self.deblock.stride;
        }
    }
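    /// Advances the state to the next macroblock in raster order, rotating
    /// the cache rows and updating neighbour availability at row ends.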
    pub fn next_mb(&mut self) {
        self.mb_x += 1;
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb_x = 0;
            self.mb_y += 1;
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();

            self.deblock.update_row();

            self.has_left = false;
        }
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    }
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }

    pub fn apply_to_blk8<F: Fn(&mut Blk8Data)>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    pub fn apply_to_blk4<F: Fn(&mut Blk4Data)>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }

    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }

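    /// Derives CABAC context increments for a motion vector difference from
    /// the sum of the neighbouring absolute MVD components, thresholded at
    /// 3 and 32 (H.264 clause 9.3.3.1.1.7).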
    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
        (ctx0, ctx1)
    }
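    /// Derives the CABAC context increment for decoding a reference index:
    /// adds 1 if the left neighbour uses a non-zero, non-direct reference
    /// and 2 if the top neighbour does.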
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let mut ctx = 0;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
            ctx += 1;
        }
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
            ctx += 2;
        }
        ctx
    }
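    /// Predicts a partition's motion vector from neighbours A (left),
    /// B (top) and C (top-right, with top-left fallback) using median
    /// prediction and the 16x8/8x16 special cases of H.264 clause 8.4.1.3,
    /// adds the decoded MV difference and updates the block caches.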
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
            mv_b
        } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
            mv_c
        } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
            mv_a
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
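    /// Derives the motion vector for a P_Skip macroblock (H.264 clause
    /// 8.4.1.1): zero MV when a neighbour is unavailable or is a zero-MV
    /// list-0 block with reference 0, otherwise the median prediction.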
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
            ZERO_MV
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
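    /// Fills motion vectors and reference indices for a direct-predicted
    /// macroblock; only temporal mode with a partitioned co-located
    /// macroblock requires per-4x4 derivation.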
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16) {
        let (col_mb, _, _) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if col_mb.mb_type.is_16x16() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
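    /// Fills motion vectors and reference indices for one direct-predicted
    /// 4x4 block.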
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) {
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
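    /// Derives direct-mode motion vectors and references for one 4x4 block.
    /// Temporal mode (H.264 clause 8.4.1.2.3) scales the co-located MV by
    /// the ratio of POC distances; spatial mode (clause 8.4.1.2.2) picks the
    /// minimum positive neighbour reference per list and zeroes the MV when
    /// the co-located block is effectively static.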
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let blk8 = blk4_to_blk8(blk4);
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
            (ZERO_MV, MISSING_POC, MISSING_REF)
        } else if mbi.ref_poc[blk8][0] != MISSING_POC {
            (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
        } else {
            (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
        };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).max(-128).min(127);
            if r0_long || td == 0 {
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).max(-128).min(127);
                let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
                let mv0 = MV {
                    x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                    y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                };
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            let col_zero = !r1_long && col_idx == ZERO_REF && col_mv.x.abs() <= 1 && col_mv.y.abs() <= 1;
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                        mv_a[ref_l]
                    } else {
                        let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                        if count == 1 {
                            if ref_a[ref_l] == ref_idx {
                                mv_a[ref_l]
                            } else if ref_b[ref_l] == ref_idx {
                                mv_b[ref_l]
                            } else {
                                mv_c[ref_l]
                            }
                        } else {
                            MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                        }
                    };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
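    /// Writes `mv` into every 4x4 block of the given partition.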
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mv[ref_l] = mv;
            }
        }
    }
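    /// Stores the absolute motion vector difference, clamped to 128, for the
    /// partition; `get_mv_ctx` only compares these values against 3 and 32,
    /// so the clamping does not affect context selection.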
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
            }
        }
    }
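    /// Writes the reference index into every 8x8 block of the partition;
    /// partitions smaller than 8x8 touch only their containing 8x8 block.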
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
        } else {
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
                }
            }
        }
    }
}

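/// Returns true when two motion vectors differ by at least four quarter-pel
/// units (one full pel) in either component, the threshold for deblocking
/// boundary strength 1.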
fn mvdiff4(mv1: MV, mv2: MV) -> bool {
    let mv = mv1 - mv2;
    (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
}