use nihav_core::frame::NASimpleVideoFrame;
use nihav_codec_support::codecs::{MV, ZERO_MV};
use nihav_codec_support::data::GenericCache;
#[derive(Clone,Copy,Debug,PartialEq)]
#[derive(Clone,Copy,Debug,PartialEq)]
    Intra16x16(u8, u8, u8),
    pub fn is_intra(self) -> bool {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM => true,
    pub fn is_intra16x16(self) -> bool {
        if let MBType::Intra16x16(_, _, _) = self {
    pub fn is_skip(self) -> bool {
            MBType::PSkip | MBType::BSkip => true,
    pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
    pub fn is_l0(self, part: usize) -> bool {
            MBType::B16x16(mode) => mode == BMode::L0,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
            MBType::Direct | MBType::BSkip => false,
    pub fn is_l1(self, part: usize) -> bool {
            MBType::B16x16(mode) => mode == BMode::L1,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
    pub fn num_parts(self) -> usize {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
            MBType::Direct | MBType::BSkip
            MBType::P16x8 | MBType::P8x16 |
            MBType::B16x8(_, _) | MBType::B8x16(_, _)
    pub fn size(self) -> (usize, usize) {
            MBType::Intra16x16(_, _, _) |
            MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
            MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
impl Default for MBType {
    fn default() -> Self { MBType::Intra4x4 }
#[derive(Clone,Copy,Debug,PartialEq)]
    pub fn num_parts(self) -> usize {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
            SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
    pub fn size(self) -> (usize, usize) {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
            SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
            SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
            SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
    pub fn is_l0(self) -> bool {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
    pub fn is_l1(self) -> bool {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => {
impl Default for SubMBType {
    fn default() -> Self { SubMBType::Direct8x8 }
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    pub fn is_intra(self) -> bool {
            CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16 => true,
    pub fn is_intra16orpcm(self) -> bool {
            CompactMBType::Intra16x16 | CompactMBType::PCM => true,
    pub fn is_skip(self) -> bool {
            CompactMBType::PSkip | CompactMBType::BSkip => true,
    pub fn is_direct(self) -> bool {
            CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None => true,
    pub fn is_inter(self) -> bool {
        !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
    pub fn is_16x16(self) -> bool {
            CompactMBType::P16x8 | CompactMBType::P8x16 |
            CompactMBType::P8x8 | CompactMBType::P8x8Ref0 |
            CompactMBType::B16x8 | CompactMBType::B8x16 |
            CompactMBType::B8x8 => false,
impl Default for CompactMBType {
    fn default() -> Self { CompactMBType::None }
impl From<MBType> for CompactMBType {
    fn from(mbtype: MBType) -> Self {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    pub fn is_none(self) -> bool { self == IntraPredMode::None }
    pub fn into_pred_idx(self) -> i8 {
impl Default for IntraPredMode {
    fn default() -> Self { IntraPredMode::None }
impl From<u8> for IntraPredMode {
    fn from(val: u8) -> Self {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
impl Into<u8> for IntraPredMode {
    fn into(self) -> u8 {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
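// Illustrative sketch (not part of the original source): the two conversions
// above are inverses for the nine codable prediction modes, so a mode can be
// round-tripped through its bitstream index. A minimal check of that,
// assuming only the mappings shown above:
#[cfg(test)]
mod ipred_mode_roundtrip_example {
    use super::*;

    #[test]
    fn roundtrip() {
        for idx in 0u8..9 {
            let mode = IntraPredMode::from(idx);
            let back: u8 = mode.into();
            assert_eq!(back, idx);
        }
    }
}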
pub const MISSING_POC: u16 = 0xFFFF;

#[derive(Clone,Copy,Debug)]
pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
const DIRECT_FLAG: u8 = 0x40;
    pub fn new(ref_idx: u8) -> Self {
    pub fn not_avail(self) -> bool {
        self == MISSING_REF || self == INVALID_REF
    pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
    pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
    pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
    fn min_pos(self, other: Self) -> Self {
        match (self.not_avail(), other.not_avail()) {
            (true, true) => self,
            (false, true) => self,
            (true, false) => other,
            (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
impl Default for PicRef {
    fn default() -> Self { MISSING_REF }
impl PartialEq for PicRef {
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
impl std::fmt::Display for PicRef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
        } else if *self == INVALID_REF {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
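// Illustrative sketch (not part of the original source): PicRef packs the
// reference index and the "predicted in direct mode" marker into one byte via
// DIRECT_FLAG, and PartialEq above deliberately ignores that marker. A minimal
// check, assuming PicRef::new(idx) simply stores idx:
#[cfg(test)]
mod pic_ref_example {
    use super::*;

    #[test]
    fn direct_flag_is_transparent() {
        let mut r = PicRef::new(2);
        r.set_direct();
        assert!(r.is_direct());
        assert_eq!(r.index(), 2);       // index() masks the flag out
        assert_eq!(r, PicRef::new(2));  // equality ignores the flag
        assert!(!r.not_avail());
    }
}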
#[derive(Clone,Copy,Default)]
    pub mb_type: CompactMBType,
    pub coded_flags: u32,
    pub transform_8x8: bool,
pub fn blk4_to_blk8(blk4: usize) -> usize {
    const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
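// Illustrative sketch (not part of the original source): the table above maps
// a 4x4 block index in raster order inside the macroblock to the 8x8 sub-block
// that owns it, i.e. MAP[i] == x / 2 + (y / 2) * 2 for x = i % 4, y = i / 4.
// A minimal check of that equivalence with a local copy of the table:
#[cfg(test)]
mod blk4_to_blk8_example {
    #[test]
    fn map_matches_coordinates() {
        const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
        for i in 0..16 {
            let (x, y) = (i % 4, i / 4);
            assert_eq!(MAP[i], x / 2 + (y / 2) * 2);
        }
    }
}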
#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
impl Default for Blk8Data {
    fn default() -> Self {
            ref_idx: [MISSING_REF; 2],
#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ipred: IntraPredMode,
pub struct SliceState {
    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,
    pub deblock: [u8; 16],
    pub top_line_y: Vec<u8>,
    pub left_y: [u8; 17], // first element is top-left
    pub top_line_c: [Vec<u8>; 2],
    pub left_c: [[u8; 9]; 2],
    pub fn new() -> Self {
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),
            top_line_y: Vec::new(),
            top_line_c: [Vec::new(), Vec::new()],
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_start = mb_pos;
        self.mb_x = mb_pos % mb_w;
        self.mb_y = mb_pos / mb_w;
        self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());
        self.has_top = false;
        self.has_left = false;
        // 0x80 is the mid-grey value used when neighbouring samples are unavailable
        self.top_line_y.resize(mb_w * 16 + 1, 0x80);
        self.top_line_c[0].resize(mb_w * 8 + 1, 0x80);
        self.top_line_c[1].resize(mb_w * 8 + 1, 0x80);
        self.left_y = [0x80; 17];
        self.left_c = [[0x80; 9]; 2];
    pub fn save_ipred_context(&mut self, frm: &NASimpleVideoFrame<u8>) {
        let dstoff = self.mb_x * 16;
        let srcoff = frm.offset[0] + self.mb_x * 16 + self.mb_y * 16 * frm.stride[0];
        self.left_y[0] = self.top_line_y[dstoff + 15];
        self.top_line_y[dstoff..][..16].copy_from_slice(&frm.data[srcoff + frm.stride[0] * 15..][..16]);
        for (dst, src) in self.left_y[1..].iter_mut().zip(frm.data[srcoff..].chunks(frm.stride[0])) {
            let cstride = frm.stride[chroma + 1];
            let dstoff = self.mb_x * 8;
            let srcoff = frm.offset[chroma + 1] + self.mb_x * 8 + self.mb_y * 8 * cstride;
            self.left_c[chroma][0] = self.top_line_c[chroma][dstoff + 7];
            self.top_line_c[chroma][dstoff..][..8].copy_from_slice(&frm.data[srcoff + cstride * 7..][..8]);
            for (dst, src) in self.left_c[chroma][1..].iter_mut().zip(frm.data[srcoff..].chunks(cstride)) {
    pub fn fill_deblock(&mut self, frefs: &FrameRefs, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
        self.deblock = [0; 16];
        let tx8x8 = self.get_cur_mb().transform_8x8;
        let cur_mbt = self.get_cur_mb().mb_type;
        let left_mbt = self.get_left_mb().mb_type;
        let mut top_mbt = self.get_top_mb().mb_type;
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top && (!tx8x8 || (y & 1) == 0) {
                if is_s || cur_mbt.is_intra() || top_mbt.is_intra() {
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock[y * 4..][..4].iter_mut() { *el |= val; }
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock[y * 4 + x] |= 0x20;
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = if (y & 1) == 0 { self.get_top_blk8(blk8).ref_idx } else { cur_ref };
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || !frefs.cmp_refs(cur_ref, top_ref) {
                                self.deblock[y * 4 + x] |= 0x10;
        let mut lleft_mbt = left_mbt;
            let skip_8 = tx8x8 && (x & 1) != 0;
            let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                } else if is_s || cur_mbt.is_intra() || lleft_mbt.is_intra() {
                    self.deblock[y * 4 + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock[y * 4 + x] |= 2;
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = if (x & 1) == 0 { self.get_left_blk8(blk8).ref_idx } else { cur_ref };
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || !frefs.cmp_refs(cur_ref, left_ref) {
                        self.deblock[y * 4 + x] |= 1;
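// Illustrative sketch (an interpretation, not part of the original source):
// judging by the values OR-ed in above, each per-4x4 `deblock` entry appears
// to keep the top-edge filtering case in the high nibble (0x40 intra on a
// macroblock edge, 0x30 intra, 0x20 coded residual, 0x10 MV difference) and
// the left-edge case in the low nibble (4, 3, 2, 1 for the same conditions).
// A hypothetical helper that splits one entry back into the two parts:
#[cfg(test)]
mod deblock_flag_example {
    fn split_deblock_flags(flags: u8) -> (u8, u8) {
        (flags >> 4, flags & 0x0F) // (top-edge case, left-edge case)
    }

    #[test]
    fn intra_mb_edges() {
        // top-of-MB intra edge combined with left-of-MB intra edge
        assert_eq!(split_deblock_flags(0x40 | 4), (4, 4));
    }
}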
    pub fn next_mb(&mut self) {
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();
            self.has_left = false;
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
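// Illustrative sketch (not part of the original source): `blk_no` addresses
// one of the sixteen 4x4 blocks of the current macroblock in raster order, so
// `blk_no & 3` is its column and `blk_no >> 2` its row, which is what the
// index computation above relies on. A minimal check of that decomposition:
#[cfg(test)]
mod blk4_index_example {
    #[test]
    fn raster_decomposition() {
        for blk_no in 0usize..16 {
            assert_eq!((blk_no & 3) + (blk_no >> 2) * 4, blk_no);
        }
    }
}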
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    pub fn apply_to_blk8<F: (Fn(&mut Blk8Data))>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
    pub fn apply_to_blk4<F: (Fn(&mut Blk4Data))>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        // xoff/yoff are in pixels; yoff is a multiple of 4, so adding it
        // directly advances blk_no by whole rows of 4x4 blocks
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
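// Illustrative sketch (not part of the original source): the context selection
// above buckets the summed absolute MV differences of the left and top
// neighbours with the thresholds CABAC uses for mvd coding. A standalone
// version of the same bucketing for one component:
#[cfg(test)]
mod mv_ctx_example {
    fn mvd_ctx(sum_abs: i16) -> usize {
        if sum_abs < 3 { 0 } else if sum_abs <= 32 { 1 } else { 2 }
    }

    #[test]
    fn thresholds() {
        assert_eq!(mvd_ctx(0), 0);
        assert_eq!(mvd_ctx(3), 1);
        assert_eq!(mvd_ctx(32), 1);
        assert_eq!(mvd_ctx(33), 2);
    }
}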
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
    #[allow(clippy::if_same_then_else)]
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        // candidate A is the block to the left, B the block above and C the
        // block above and to the right of the current partition
        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        // fall back to the above-left candidate when C is unavailable
        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];

        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
        } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
        } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
        } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
        } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if ref_a == ref_idx {
                } else if ref_b == ref_idx {
                MV::pred(mv_a, mv_b, mv_c)

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
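// Illustrative sketch (an assumption, not part of the original source): when
// none of the special 16x8/8x16 rules applies and it is not the case that
// exactly one neighbour shares the reference index, the predictor falls back
// to MV::pred above; the sketch below assumes that is the usual H.264
// component-wise median of candidates A, B and C.
#[cfg(test)]
mod median_mv_example {
    fn median3(a: i16, b: i16, c: i16) -> i16 {
        // min(max(a, b), max(min(a, b), c)) == median of the three values
        a.max(b).min(a.min(b).max(c))
    }

    #[test]
    fn median_picks_middle_value() {
        assert_eq!(median3(1, 5, 3), 3);
        assert_eq!(median3(5, 1, 10), 5);
        assert_eq!(median3(-2, -2, 7), -2);
    }
}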
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];

        let ref_idx = ZERO_REF;
        // P_Skip takes the zero MV when a neighbour is unavailable or when the
        // left or top neighbour uses reference 0 with a zero MV; otherwise the
        // usual prediction applies
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
                if ref_a == ref_idx {
                } else if ref_b == ref_idx {
                MV::pred(mv_a, mv_b, mv_c)

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16) {
        let (col_mb, _, _) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if col_mb.mb_type.is_16x16() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) {
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let blk8 = blk4_to_blk8(blk4);
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
            (ZERO_MV, MISSING_POC, MISSING_REF)
        } else if mbi.ref_poc[blk8][0] != MISSING_POC {
            (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
            (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
            // temporal direct mode: scale the co-located MV by the ratio of
            // POC distances (tb = current to ref0, td = ref1 to ref0)
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).max(-128).min(127);
            if r0_long || td == 0 {
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).max(-128).min(127);
                let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
                    x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                    y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
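// Illustrative sketch (not part of the original source): the temporal-direct
// branch above scales the co-located MV by the ratio of picture-order
// distances tb/td using the fixed-point factors tx and scale. The same
// arithmetic on plain integers, with a worked example where the current
// picture lies halfway between its two references:
#[cfg(test)]
mod temporal_direct_example {
    fn scale_mv(col: (i16, i16), td: i32, tb: i32) -> (i16, i16) {
        let tx = (16384 + (td / 2).abs()) / td;
        let scale = ((tb * tx + 32) >> 6).max(-1024).min(1023);
        (((i32::from(col.0) * scale + 128) >> 8) as i16,
         ((i32::from(col.1) * scale + 128) >> 8) as i16)
    }

    #[test]
    fn halfway_picture_gets_half_the_motion() {
        // td = 2, tb = 1, so the list 0 MV is half the co-located MV
        let col = (8i16, 4i16);
        let mv0 = scale_mv(col, 2, 1);
        assert_eq!(mv0, (4, 2));
        // the list 1 MV is mv0 - col, as in the code above
        assert_eq!((mv0.0 - col.0, mv0.1 - col.1), (-4, -2));
    }
}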
            // spatial direct mode
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;

            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);

            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {

            let mut mvs = [ZERO_MV; 2];
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                        let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                            if ref_a[ref_l] == ref_idx {
                            } else if ref_b[ref_l] == ref_idx {
                            MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
            (mvs[0], refs[0], mvs[1], refs[1])
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
// true if the two MVs differ by at least one luma pixel (4 in quarter-pel units)
fn mvdiff4(mv1: MV, mv2: MV) -> bool {
    let mv = mv1 - mv2;
    (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
}
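// Illustrative sketch (not part of the original source): with quarter-pel MVs
// a component difference of 4 is exactly one luma pixel, which is the
// threshold the deblocking code above checks. A minimal check, assuming MV
// subtraction is component-wise:
#[cfg(test)]
mod mvdiff4_example {
    use super::*;

    #[test]
    fn one_pel_difference_triggers_filtering() {
        let zero = MV { x: 0, y: 0 };
        assert!( mvdiff4(zero, MV { x: 4, y: 0 }));
        assert!(!mvdiff4(zero, MV { x: 3, y: 3 }));
    }
}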