1 use nihav_core::codecs::*;
2 use nihav_core::io::byteio::*;
3 use nihav_core::data::GenericCache;
4 use super::vpcommon::*;
8 enum VPTreeDef<T: Copy> {
14 fn read_tree<T:Copy>(&mut self, tree_def: &[VPTreeDef<T>], tree_prob: &[u8]) -> T;
// Tree-based symbol decoder on top of the boolean (range) coder:
// each decoded bit (using the probability attached to the current node)
// selects one of two table entries; a Value leaf terminates the walk,
// an Index entry redirects to the next node pair.
// NOTE(review): several lines of this impl are not visible in this view.
17 impl<'a> VPTreeReader for BoolCoder<'a> {
18 fn read_tree<T:Copy>(&mut self, tree_def: &[VPTreeDef<T>], tree_prob: &[u8]) -> T {
// idx >> 1 maps the node-pair index to its single probability entry.
22 let bit = self.read_prob(tree_prob[idx >> 1]);
23 match tree_def[idx + (bit as usize)] {
24 VPTreeDef::Value(v) => return v,
25 VPTreeDef::Index(ix) => { idx = ix as usize; },
32 #[derive(Clone,Copy,PartialEq,Debug)]
40 //sub-block prediction modes
51 impl Default for PredMode {
52 fn default() -> Self { PredMode::DCPred }
56 fn to_b_mode(self) -> Self {
57 if self == PredMode::DCPred {
63 fn to_b_index(self) -> usize {
65 PredMode::DCPred => 0,
66 PredMode::TMPred => 1,
69 PredMode::LDPred => 4,
70 PredMode::RDPred => 5,
71 PredMode::VRPred => 6,
72 PredMode::VLPred => 7,
73 PredMode::HDPred => 8,
74 PredMode::HUPred => 9,
80 const PITCH_MODE_NORMAL: u8 = 0;
81 const PITCH_MODE_FOUR: u8 = 1;
82 const PITCH_MODE_X2: u8 = 2;
83 const PITCH_MODE_X4: u8 = 3;
85 #[derive(Clone,Copy,Default)]
92 #[derive(Clone,Copy,PartialEq)]
// Expands a decoded DCT token into a signed coefficient value.
// Tokens Zero..Four need at most a sign bit; Cat1..Cat6 select a
// category whose extra magnitude bits are read with the per-category
// probabilities (VP56_COEF_ADD_PROBS) and added to VP56_COEF_BASE.
// NOTE(review): some lines of this function are not visible in this view.
108 fn expand_token(bc: &mut BoolCoder, token: DCTToken) -> i16 {
111 DCTToken::Zero => return 0,
112 DCTToken::One => return if bc.read_bool() { -1 } else { 1 },
113 DCTToken::Two => return if bc.read_bool() { -2 } else { 2 },
114 DCTToken::Three => return if bc.read_bool() { -3 } else { 3 },
115 DCTToken::Four => return if bc.read_bool() { -4 } else { 4 },
116 DCTToken::Cat1 => cat = 0,
117 DCTToken::Cat2 => cat = 1,
118 DCTToken::Cat3 => cat = 2,
119 DCTToken::Cat4 => cat = 3,
120 DCTToken::Cat5 => cat = 4,
121 DCTToken::Cat6 => cat = 5,
// Read the category's extra bits MSB-first; a probability of 128
// marks the end of the per-category probability list.
125 let add_probs = &VP56_COEF_ADD_PROBS[cat];
126 for prob in add_probs.iter() {
127 if *prob == 128 { break; }
128 add = (add << 1) | (bc.read_prob(*prob) as i16);
// Sign bit is coded last; final magnitude is base + extra bits.
130 let sign = bc.read_bool();
131 let level = VP56_COEF_BASE[cat] + add;
139 struct SBParams<'a> {
140 coef_probs: &'a [[[[u8; 11]; 3]; 8]; 4],
141 scan: &'a [usize; 16],
// Decodes one 4x4 subblock of DCT coefficients.
// `ctype` selects the coefficient-probability plane, `pctx` is the
// left+top non-zero context; returns 1 if the block has any non-zero
// coefficient (used to update the prediction caches), 0 otherwise.
// NOTE(review): some lines of this function are not visible in this view.
145 fn decode_subblock<'a>(bc: &mut BoolCoder, coeffs: &mut [i16; 16], ctype: usize, pctx: u8, sbparams: &SBParams) -> u8 {
146 const COEF_BANDS: [usize; 16] = [ 0, 1, 2, 3, 6, 4, 5, 6, 6, 6, 6, 6, 6, 6, 6, 7 ];
// Blocks with a separate Y2 (DC) plane skip coefficient 0 here.
149 let start = if ctype != 0 { 0 } else { 1 };
151 let mut cval = pctx as usize;
152 for idx in start..16 {
153 let probs = &sbparams.coef_probs[ctype][COEF_BANDS[idx]][cval];
154 let tok = bc.read_tree(COEF_TREE, probs);
155 if tok == DCTToken::EOB { break; }
156 let level = expand_token(bc, tok);
// Dequantize while storing in (scan-order) position.
157 coeffs[sbparams.scan[idx]] = level.wrapping_mul(sbparams.qmat[idx]);
// Context for the next coefficient: 0, 1 or 2+ magnitude.
158 cval = level.abs().min(2) as usize;
161 if has_nz > 0 { 1 } else { 0 }
164 #[derive(Clone,Copy,Default)]
174 struct DecoderState {
175 features: [Option<MBFeature>; 4],
182 loop_filter_level: u8,
188 kf_ymode_prob: [u8; 4],
189 kf_uvmode_prob: [u8; 3],
194 coef_probs: [[[[u8; 11]; 3]; 8]; 4],
195 mv_probs: [[u8; 17]; 2],
197 force_quant: Option<u8>,
198 force_loop_str: Option<u8>,
199 force_gf_update: bool,
200 force_pitch: Option<u8>,
204 pdc_pred_count: usize,
206 ipred_ctx_y: IPredContext,
207 ipred_ctx_u: IPredContext,
208 ipred_ctx_v: IPredContext,
// Restores the mode, coefficient and motion-vector probability tables
// to their bitstream-defined defaults (done for intra/keyframes).
212 fn reset(&mut self) {
213 self.kf_ymode_prob.copy_from_slice(Y_MODE_TREE_PROBS);
214 self.kf_uvmode_prob.copy_from_slice(UV_MODE_TREE_PROBS);
215 self.coef_probs.copy_from_slice(&DEFAULT_DCT_PROBS);
216 self.mv_probs.copy_from_slice(&DEFAULT_MV_PROBS);
220 #[derive(Clone,Copy,Debug,PartialEq)]
228 #[derive(Clone,Copy,Debug,PartialEq)]
// Decodes one motion-vector component (x or y).
// probs[0] selects short vs. long coding, probs[1] is the sign,
// probs[2..9] drive the small-MV tree and probs[9..] the raw bits of a
// long vector, read in LONG_VECTOR_ORDER (bit 3 is read last and only
// when any of bits 4..7 is set).
// NOTE(review): some lines of this function are not visible in this view.
236 fn decode_mv_component(bc: &mut BoolCoder, probs: &[u8; 17]) -> i16 {
237 const LONG_VECTOR_ORDER: [usize; 7] = [ 0, 1, 2, 7, 6, 5, 4 ];
239 let val = if !bc.read_prob(probs[0]) {
240 bc.read_tree(SMALL_MV_TREE, &probs[2..9])
242 let raw_probs = &probs[9..];
244 for ord in LONG_VECTOR_ORDER.iter() {
245 raw |= (bc.read_prob(raw_probs[*ord]) as i16) << *ord;
// Bit 3 is conditional on the high nibble being non-zero.
247 if (raw & 0xF0) != 0 {
248 raw |= (bc.read_prob(raw_probs[3]) as i16) << 3;
// Zero magnitude never carries a sign bit.
254 if (val == 0) || !bc.read_prob(probs[1]) {
262 y_pred: GenericCache<u8>,
263 u_pred: GenericCache<u8>,
264 v_pred: GenericCache<u8>,
265 y2_pred: GenericCache<u8>,
266 y_pred_left: [u8; 4],
267 u_pred_left: [u8; 2],
268 v_pred_left: [u8; 2],
275 y_pred: GenericCache::new(1, 1, 0),
276 u_pred: GenericCache::new(1, 1, 0),
277 v_pred: GenericCache::new(1, 1, 0),
278 y2_pred: GenericCache::new(1, 1, 0),
// Reallocates the per-plane non-zero-coefficient caches for a new
// macroblock width: 4 rows of 4*mb_w entries for luma, 2 rows of
// 2*mb_w for each chroma plane, 1 row of mb_w for Y2; the +1 column
// provides the out-of-frame left/edge context.
285 fn resize(&mut self, mb_w: usize) {
286 self.y_pred = GenericCache::new(4, mb_w * 4 + 1, 0);
287 self.u_pred = GenericCache::new(2, mb_w * 2 + 1, 0);
288 self.v_pred = GenericCache::new(2, mb_w * 2 + 1, 0);
289 self.y2_pred = GenericCache::new(1, mb_w + 1, 0);
// Clears all coefficient caches and the per-row "left" contexts at the
// start of a frame.
// NOTE(review): resets for y_pred/u_pred/v_pred are not visible in
// this view but presumably precede the y2_pred reset — confirm.
291 fn reset(&mut self) {
295 self.y2_pred.reset();
296 self.y_pred_left = [0; 4];
297 self.u_pred_left = [0; 2];
298 self.v_pred_left = [0; 2];
299 self.y2_pred_left = 0;
// Advances every cache to the next macroblock row (the previous row
// becomes the "top" context for the new one).
301 fn update_row(&mut self) {
302 self.y_pred.update_row();
303 self.u_pred.update_row();
304 self.v_pred.update_row();
305 self.y2_pred.update_row();
310 info: NACodecInfoRef,
317 mb_info: Vec<MBInfo>,
321 ymodes: Vec<PredMode>,
323 uvmodes: Vec<PredMode>,
324 uvmode_stride: usize,
326 dstate: DecoderState,
329 coeffs: [[i16; 16]; 25],
331 qmat: [[[i16; 16]; 3]; 5],
333 mc_buf: NAVideoBufferRef<u8>,
335 tmp_scan: [usize; 16],
340 let vt = alloc_video_buffer(NAVideoInfo::new(128, 128, false, YUV420_FORMAT), 4).unwrap();
341 let mut scan = [0; 16];
342 scan.copy_from_slice(&DEFAULT_SCAN_ORDER);
343 let mc_buf = vt.get_vbuf().unwrap();
345 info: NACodecInfoRef::default(),
347 shuf: VPShuffler::new(),
361 dstate: DecoderState::default(),
362 pcache: PredCache::new(),
364 coeffs: [[0; 16]; 25],
367 qmat: [[[0; 16]; 3]; 5],
// Recomputes macroblock-grid dimensions and resizes all per-MB /
// per-subblock state when the frame size changes; no-op when the size
// is unchanged. Strides are in 4x4 subblock units (4 per MB) for
// motion vectors and luma modes, 1 per MB for chroma modes.
// NOTE(review): some lines of this function are not visible in this view.
372 fn set_dimensions(&mut self, width: usize, height: usize) {
373 if (width == self.width) && (height == self.height) {
377 self.height = height;
378 self.mb_w = (self.width + 15) >> 4;
379 self.mb_h = (self.height + 15) >> 4;
380 self.mb_info.resize(self.mb_w * self.mb_h, MBInfo::default());
381 self.mv_stride = self.mb_w * 4;
382 self.mvs.resize(self.mv_stride * self.mb_h * 4, ZERO_MV);
384 self.ymode_stride = self.mb_w * 4;
385 self.uvmode_stride = self.mb_w;
386 self.ymodes.resize(self.ymode_stride * self.mb_h * 4, PredMode::default());
387 self.uvmodes.resize(self.uvmode_stride * self.mb_h, PredMode::default());
389 self.pcache.resize(self.mb_w);
// Parses the (up to four) macroblock feature definitions from the
// header: presence probability, tree probabilities and default values.
// Feature value bit width depends on the feature index and, for the
// last case, on the bitstream version.
// NOTE(review): some lines of this function are not visible in this view.
391 fn read_features(&mut self, bc: &mut BoolCoder) -> DecoderResult<()> {
392 for (i, feat) in self.dstate.features.iter_mut().enumerate() {
394 let mut feature = MBFeature::default();
395 feature.present_prob = bc.read_byte();
396 for tp in feature.tree_probs.iter_mut() {
398 *tp = bc.read_byte();
404 let fbits = match i {
407 _ => if self.dstate.version == 0 { 8 } else { 5 },
409 for dval in feature.def_val.iter_mut() {
411 *dval = bc.read_bits(fbits) as u8;
417 *feat = Some(feature);
// Conditionally updates individual DCT coefficient probabilities:
// each entry is replaced only when a flag coded with the matching
// DCT_UPDATE_PROBS probability says so.
// NOTE(review): the surrounding nested loops over i/j/k/l are not
// visible in this view.
424 fn read_dct_coef_prob_upd(&mut self, bc: &mut BoolCoder) -> DecoderResult<()> {
429 if bc.read_prob(DCT_UPDATE_PROBS[i][j][k][l]) {
430 self.dstate.coef_probs[i][j][k][l] = bc.read_byte();
// Conditionally updates motion-vector probabilities, gated per entry
// by MV_UPDATE_PROBS; the new value is read as a coded probability.
// NOTE(review): the loops over comp/i are not visible in this view.
438 fn read_mv_prob_upd(&mut self, bc: &mut BoolCoder) -> DecoderResult<()> {
441 if bc.read_prob(MV_UPDATE_PROBS[comp][i]) {
442 self.dstate.mv_probs[comp][i] = bc.read_probability();
// Reads per-macroblock feature flags and applies the selected values:
// feature 0 forces a quantizer index, 1 a loop-filter strength, 2 a
// golden-frame update, and any other a pitch override. All overrides
// are cleared first so absent features leave the defaults in place.
// NOTE(review): some lines of this function are not visible in this view.
448 fn decode_mb_features(&mut self, bc: &mut BoolCoder, _mb_x: usize, _mb_y: usize) -> DecoderResult<()> {
449 self.dstate.force_quant = None;
450 self.dstate.force_loop_str = None;
451 self.dstate.force_gf_update = false;
452 self.dstate.force_pitch = None;
453 for (i, feat) in self.dstate.features.iter().enumerate() {
454 if let Some(feat) = feat {
455 let present = bc.read_prob(feat.present_prob);
457 let ftype_idx = bc.read_tree(FEATURE_TREE, &feat.tree_probs);
458 let val = feat.def_val[ftype_idx];
// Feature 0 stores the *index* (selects a qmat set), others the value.
460 0 => self.dstate.force_quant = Some(ftype_idx as u8),
461 1 => self.dstate.force_loop_str = Some(val),
462 2 => self.dstate.force_gf_update = true,
463 _ => self.dstate.force_pitch = Some(val),
// Decodes all residue blocks of one macroblock: the optional Y2 (DC)
// block (index 24), 16 luma blocks, then 4+4 chroma blocks, updating
// the non-zero caches that provide the coding context, and finally
// runs the inverse transforms. A per-MB forced quantizer selects an
// alternative qmat set (index + 1; 0 is the frame default).
// NOTE(review): many lines of this function are not visible in this view.
470 fn decode_residue(&mut self, bc: &mut BoolCoder, mb_x: usize, mb_idx: usize) {
471 let qmat_idx = if let Some(idx) = self.dstate.force_quant { (idx as usize) + 1 } else { 0 };
472 let mut sbparams = SBParams {
473 scan: &DEFAULT_SCAN_ORDER,
474 qmat: &self.qmat[qmat_idx][2],
475 coef_probs: &self.dstate.coef_probs,
477 let mut has_ac = [false; 25];
// Y2 block: context is left + top non-zero flags from the Y2 cache.
479 if self.dstate.has_y2 {
480 let pred = &self.pcache.y2_pred;
481 let pidx = pred.xpos + mb_x;
482 let pctx = self.pcache.y2_pred_left + pred.data[pidx - pred.stride];
484 let has_nz = decode_subblock(bc, &mut self.coeffs[24], 1, pctx, &sbparams);
485 self.pcache.y2_pred.data[pidx] = has_nz;
486 self.pcache.y2_pred_left = has_nz;
487 has_ac[24] = has_nz > 0;
// No Y2 for this MB: propagate the top context downward unchanged.
491 let pred = &mut self.pcache.y2_pred;
492 let pidx = pred.xpos + mb_x;
493 pred.data[pidx] = pred.data[pidx - pred.stride];
// Luma blocks use the (possibly custom) scan order and luma qmat.
497 sbparams.scan = &self.scan;
498 sbparams.qmat = &self.qmat[qmat_idx][0];
502 let pred = &self.pcache.y_pred;
503 let pidx = pred.xpos + mb_x * 4 + bx + by * pred.stride;
504 let pctx = self.pcache.y_pred_left[by] + pred.data[pidx - pred.stride];
506 let has_nz = decode_subblock(bc, &mut self.coeffs[i], ytype, pctx, &sbparams);
507 self.pcache.y_pred.data[pidx] = has_nz;
508 self.pcache.y_pred_left[by] = has_nz;
509 has_ac[i] = has_nz > 0;
// Chroma U blocks (indices 16..20).
511 sbparams.qmat = &self.qmat[qmat_idx][1];
514 let by = (i >> 1) & 1;
515 let pred = &self.pcache.u_pred;
516 let pidx = pred.xpos + mb_x * 2 + bx + by * pred.stride;
517 let pctx = self.pcache.u_pred_left[by] + pred.data[pidx - pred.stride];
519 let has_nz = decode_subblock(bc, &mut self.coeffs[i], 2, pctx, &sbparams);
520 self.pcache.u_pred.data[pidx] = has_nz;
521 self.pcache.u_pred_left[by] = has_nz;
522 has_ac[i] = has_nz > 0;
// Chroma V blocks (indices 20..24).
526 let by = (i >> 1) & 1;
527 let pred = &self.pcache.v_pred;
528 let pidx = pred.xpos + mb_x * 2 + bx + by * pred.stride;
529 let pctx = self.pcache.v_pred_left[by] + pred.data[pidx - pred.stride];
531 let has_nz = decode_subblock(bc, &mut self.coeffs[i], 2, pctx, &sbparams);
532 self.pcache.v_pred.data[pidx] = has_nz;
533 self.pcache.v_pred_left[by] = has_nz;
534 has_ac[i] = has_nz > 0;
// Inter MBs predict the Y2 DC from the previous one once it has been
// stable for more than 3 MBs; sign change or zero resets the run.
537 if self.dstate.has_y2 {
538 let y2block = &mut self.coeffs[24];
539 if self.mb_info[mb_idx].mb_type != VPMBType::Intra {
540 let mut dc = y2block[0];
541 let pval = self.dstate.pdc_pred_val;
542 if self.dstate.pdc_pred_count > 3 {
546 if (pval == 0) || (dc == 0) || ((pval ^ dc) < 0) {
547 self.dstate.pdc_pred_count = 0;
548 } else if dc == pval {
549 self.dstate.pdc_pred_count += 1;
551 self.dstate.pdc_pred_val = dc;
555 } else if y2block[0] != 0 {
// Scatter the Y2 coefficients back as the DC of each luma block.
559 self.coeffs[i][0] = self.coeffs[24][i];
// Full IDCT only when AC is present; DC-only shortcut otherwise.
564 idct4x4(&mut self.coeffs[i]);
565 } else if self.coeffs[i][0] != 0 {
566 idct4x4_dc(&mut self.coeffs[i]);
// Fills the dequantization matrices: set 0 holds the frame-level
// quantizers (plane order: [0]=Y, [1]=UV, [2]=Y2; entry 0 is DC, the
// rest AC), and sets 1..=4 are built from feature-0 default values so
// a per-MB forced quantizer can pick them by index.
// NOTE(review): the AC-fill loops are only partially visible in this view.
571 fn set_qmat(&mut self, y_dc_q: usize, y_ac_q: usize, y2_dc_q: usize, y2_ac_q: usize, uv_dc_q: usize, uv_ac_q: usize) {
572 self.qmat[0][0][0] = Y_DC_QUANTS[y_dc_q];
574 self.qmat[0][0][i] = Y_AC_QUANTS[y_ac_q];
576 self.qmat[0][1][0] = UV_DC_QUANTS[uv_dc_q];
578 self.qmat[0][1][i] = UV_AC_QUANTS[uv_ac_q];
580 self.qmat[0][2][0] = Y2_DC_QUANTS[y2_dc_q];
582 self.qmat[0][2][i] = Y2_AC_QUANTS[y2_ac_q];
584 if let Some(ref feat) = self.dstate.features[0] {
// One extra qmat set per feature default value (same q for DC and AC).
586 let q = feat.def_val[j] as usize;
587 self.qmat[j + 1][0][0] = Y_DC_QUANTS[q];
589 self.qmat[j + 1][0][i] = Y_AC_QUANTS[q];
591 self.qmat[j + 1][1][0] = UV_DC_QUANTS[q];
593 self.qmat[j + 1][1][i] = UV_AC_QUANTS[q];
595 self.qmat[j + 1][2][0] = Y2_DC_QUANTS[q];
597 self.qmat[j + 1][2][i] = Y2_AC_QUANTS[q];
// Writes the same luma prediction mode into the 4x4 subblock-mode grid
// covering one macroblock.
// NOTE(review): the x/y loop headers are not visible in this view.
602 fn fill_ymode(&mut self, mb_x: usize, mb_y: usize, ymode: PredMode) {
603 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
606 self.ymodes[iidx + x] = ymode;
608 iidx += self.ymode_stride;
// Writes the same motion vector into the 4x4 subblock-MV grid covering
// one macroblock.
// NOTE(review): the row advance uses `self.mb_w * 4`, which equals
// `self.mv_stride` as set in set_dimensions — consider using
// mv_stride for consistency (not changed here).
611 fn fill_mv(&mut self, mb_x: usize, mb_y: usize, mv: MV) {
612 let mut iidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
615 self.mvs[iidx + x] = mv;
617 iidx += self.mb_w * 4;
// Surveys neighbouring subblock MVs (candidate offsets + weights in
// CAND_POS, blk_no packs the 4x4 subblock coordinate as y*4+x) and
// returns (mode probabilities, nearest MV, near MV, predicted MV).
// Counters ct[0..4] accumulate weights for zero / nearest / near /
// other and index INTER_MODE_PROBS.
// NOTE(review): many lines of this function are not visible in this view.
620 fn find_mv_pred(&self, mb_x: usize, mb_y: usize) -> ([u8; 4], MV, MV, MV) {
621 const CAND_POS: [(i8, i8, u8, u8); 12] = [
622 (-1, 0, 8, 12), ( 0, -1, 8, 3),
623 (-1, -1, 2, 15), (-1, 1, 2, 12),
624 (-2, 0, 2, 12), ( 0, -2, 2, 3),
625 (-1, -2, 1, 15), (-2, -1, 1, 15),
626 (-2, 1, 1, 12), (-1, 2, 1, 12),
627 (-2, -2, 1, 15), (-2, 2, 1, 12)
630 let mut nearest_mv = ZERO_MV;
631 let mut near_mv = ZERO_MV;
633 let mut ct: [u8; 4] = [0; 4];
// Version 0 excludes position 0; mvwrap linearizes (x, y) with one
// extra column so out-of-frame candidates can be rejected cheaply.
635 let start = if self.dstate.version == 0 { 1 } else { 0 };
636 let mvwrap = (self.mb_w as isize) + 1;
637 for (yoff, xoff, weight, blk_no) in CAND_POS.iter() {
638 let cx = (mb_x as isize) + (*xoff as isize);
639 let cy = (mb_y as isize) + (*yoff as isize);
640 let mvpos = cx + cy * mvwrap;
641 if (mvpos < start) || ((mvpos % mvwrap) == (mvwrap - 1)) {
645 let cx = (mvpos % mvwrap) as usize;
646 let cy = (mvpos / mvwrap) as usize;
647 let bx = (*blk_no as usize) & 3;
648 let by = (*blk_no as usize) >> 2;
649 let blk_pos = cx * 4 + bx + (cy * 4 + by) * self.mv_stride;
650 let mv = self.mvs[blk_pos];
// Classify candidate: first distinct MV is "nearest", second "near".
656 if (nearest_mv == ZERO_MV) || (nearest_mv == mv) {
659 } else if near_mv == ZERO_MV {
663 idx = if mv == near_mv { 2 } else { 3 };
// Predicted MV: the stronger of nearest/near if it beats zero.
667 let pred_mv = if ct[1] > ct[2] {
668 if ct[1] >= ct[0] { nearest_mv } else { ZERO_MV }
670 if ct[2] >= ct[0] { near_mv } else { ZERO_MV }
673 let mvprobs = [INTER_MODE_PROBS[ct[0] as usize][0],
674 INTER_MODE_PROBS[ct[1] as usize][1],
675 INTER_MODE_PROBS[ct[2] as usize][2],
676 INTER_MODE_PROBS[ct[2] as usize][3]];
678 (mvprobs, nearest_mv, near_mv, pred_mv)
// Decodes one sub-MV for split-MV mode: the reference may be the left
// neighbour, the top neighbour, zero, or a newly coded delta added to
// the predicted MV (y component coded before x).
// NOTE(review): some lines of this function are not visible in this view.
680 fn get_split_mv(&self, bc: &mut BoolCoder, mb_x: usize, mb_y: usize, bx: usize, by: usize, pred_mv: MV) -> MV {
681 let mode = bc.read_tree(SUB_MV_REF_TREE, &SUB_MV_REF_PROBS);
682 let mvidx = mb_x * 4 + bx + (mb_y * 4 + by) * self.mv_stride;
// Left/top neighbours exist unless the subblock sits on a frame edge.
685 if (mb_x > 0) || (bx > 0) {
692 if (mb_y > 0) || (by > 0) {
693 self.mvs[mvidx - self.mv_stride]
698 SubMVRef::Zero => ZERO_MV,
700 let dmy = decode_mv_component(bc, &self.dstate.mv_probs[0]);
701 let dmx = decode_mv_component(bc, &self.dstate.mv_probs[1]);
702 pred_mv + MV{ x: dmx, y: dmy }
// Decodes split-MV mode for one macroblock and fills its 4x4 MV grid.
// TopBottom/LeftRight code two MVs replicated over halves, Quarters
// codes one MV per 8x8 quadrant, Sixteenths one per 4x4 subblock.
// NOTE(review): many lines of this function are not visible in this
// view (including the LeftRight replication loop — the visible
// mvidx+1 store before right_mv is decoded appears to be part of an
// unseen loop; confirm against the full source).
706 fn do_split_mv(&mut self, bc: &mut BoolCoder, mb_x: usize, mb_y: usize, pred_mv: MV) -> DecoderResult<()> {
707 let split_mode = bc.read_tree(MV_SPLIT_MODE_TREE, &MV_SPLIT_MODE_PROBS);
708 let mut mvidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
710 MVSplitMode::TopBottom => {
711 let top_mv = self.get_split_mv(bc, mb_x, mb_y, 0, 0, pred_mv);
713 for x in 0..4 { self.mvs[mvidx + x] = top_mv; }
714 mvidx += self.mv_stride;
716 let bot_mv = self.get_split_mv(bc, mb_x, mb_y, 0, 2, pred_mv);
718 for x in 0..4 { self.mvs[mvidx + x] = bot_mv; }
719 mvidx += self.mv_stride;
722 MVSplitMode::LeftRight => {
723 let left_mv = self.get_split_mv(bc, mb_x, mb_y, 0, 0, pred_mv);
724 self.mvs[mvidx + 1] = left_mv;
725 let right_mv = self.get_split_mv(bc, mb_x, mb_y, 2, 0, pred_mv);
727 self.mvs[mvidx + 0] = left_mv;
728 self.mvs[mvidx + 1] = left_mv;
729 self.mvs[mvidx + 2] = right_mv;
730 self.mvs[mvidx + 3] = right_mv;
731 mvidx += self.mv_stride;
734 MVSplitMode::Quarters => {
735 for y in (0..4).step_by(2) {
736 for x in (0..4).step_by(2) {
737 self.mvs[mvidx + x] = self.get_split_mv(bc, mb_x, mb_y, x, y, pred_mv);
738 self.mvs[mvidx + x + 1] = self.mvs[mvidx + x];
// Replicate the quadrant MV into the second row of the 8x8 block.
741 self.mvs[mvidx + x + self.mv_stride] = self.mvs[mvidx + x];
743 mvidx += self.mv_stride * 2;
746 MVSplitMode::Sixteenths => {
749 self.mvs[mvidx + x] = self.get_split_mv(bc, mb_x, mb_y, x, y, pred_mv);
751 mvidx += self.mv_stride;
// Adds the decoded residue blocks onto the predicted frame.
// The pitch mode changes how subblocks map onto rows: NORMAL is the
// plain raster layout, the other modes use doubled/quadrupled strides
// or 16x1 rows for the interlaced-style layouts; chroma supports only
// single or doubled stride.
// NOTE(review): many lines of this function are not visible in this view.
758 fn add_residue(&self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, do_luma: bool, pitch_mode: u8) {
760 let ydst = &mut dframe.data[dframe.offset[0]..];
761 let ystride = dframe.stride[0];
762 let mut yoff = mb_x * 16 + mb_y * 16 * ystride;
764 PITCH_MODE_NORMAL => {
767 add_coeffs4x4(ydst, yoff + x * 4, ystride, &self.coeffs[x + y * 4]);
774 add_coeffs16x1(ydst, yoff, &self.coeffs[y]);
781 add_coeffs4x4(ydst, yoff + x * 4, ystride * 2, &self.coeffs[x + y * 4]);
// Rewind to the second field's first line for the interleaved pass.
785 yoff -= 15 * ystride;
788 add_coeffs4x4(ydst, yoff + x * 4, ystride * 2, &self.coeffs[x + y * 4]);
796 add_coeffs4x4(ydst, yoff + x * 4, ystride * 4, &self.coeffs[x + y * 4]);
// Chroma planes: blocks 16..20 are U, 20..24 are V.
804 let dst = &mut dframe.data[0..];
805 let mut uoff = dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1];
806 let ustride = dframe.stride[1];
807 let mut voff = dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2];
808 let vstride = dframe.stride[2];
809 if (pitch_mode == PITCH_MODE_NORMAL) || (pitch_mode == PITCH_MODE_FOUR) {
812 add_coeffs4x4(dst, uoff + x * 4, ustride, &self.coeffs[16 + x + y * 2]);
813 add_coeffs4x4(dst, voff + x * 4, vstride, &self.coeffs[20 + x + y * 2]);
821 add_coeffs4x4(dst, uoff + x * 4, ustride * 2, &self.coeffs[16 + x + y * 2]);
822 add_coeffs4x4(dst, voff + x * 4, vstride * 2, &self.coeffs[20 + x + y * 2]);
// Reconstructs one intra macroblock: runs 16x16 luma prediction (or
// per-4x4 B-mode prediction with top-right propagation), 8x8 chroma
// prediction, then adds the residue. The forced pitch (if any) selects
// the residue layout; B-mode allows only NORMAL or X2 pitch.
// NOTE(review): many lines of this function are not visible in this view.
829 fn recon_intra_mb(&mut self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize) -> DecoderResult<()> {
830 let pitch = self.dstate.force_pitch.unwrap_or(0);
831 let pitch_mode = (pitch >> 3) & 3;
833 let mb_idx = mb_x + mb_y * self.mb_w;
834 let has_top = mb_y > 0;
835 let has_left = mb_x > 0;
836 let ydst = &mut dframe.data[dframe.offset[0]..];
837 let ystride = dframe.stride[0];
838 let mut yoff = mb_x * 16 + mb_y * 16 * ystride;
839 let ipred_ctx_y = &mut self.dstate.ipred_ctx_y;
840 ipred_ctx_y.has_top = has_top;
841 ipred_ctx_y.has_left = has_left;
842 let is_normal = self.mb_info[mb_idx].ymode != PredMode::BPred;
// Whole-MB 16x16 prediction for non-BPred modes.
844 ipred_ctx_y.fill(ydst, yoff, ystride, 16, 16);
845 match self.mb_info[mb_idx].ymode {
846 PredMode::DCPred => IPred16x16::ipred_dc(ydst, yoff, ystride, ipred_ctx_y),
847 PredMode::HPred => IPred16x16::ipred_h (ydst, yoff, ystride, ipred_ctx_y),
848 PredMode::VPred => IPred16x16::ipred_v (ydst, yoff, ystride, ipred_ctx_y),
849 PredMode::TMPred => IPred16x16::ipred_tm(ydst, yoff, ystride, ipred_ctx_y),
// BPred path: per-4x4 prediction using the stored subblock modes.
853 validate!((pitch_mode == PITCH_MODE_NORMAL) || (pitch_mode == PITCH_MODE_X2));
854 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
855 let mut tr_save = [0x80u8; 16];
856 if pitch_mode == PITCH_MODE_X2 {
857 // reorganise coefficient data for interlaced case
858 for y in (0..4).step_by(2) {
860 let mut tmpblock = [0i16; 16 * 2];
861 let eidx = x + y * 4;
862 let oidx = x + y * 4 + 4;
865 tmpblock[i * 8 + 0 + j] = self.coeffs[eidx][i * 4 + j];
866 tmpblock[i * 8 + 4 + j] = self.coeffs[oidx][i * 4 + j];
869 self.coeffs[eidx].copy_from_slice(&tmpblock[0..16]);
870 self.coeffs[oidx].copy_from_slice(&tmpblock[16..32]);
// Top-right edge sample used when the true top-right is unavailable.
874 let tr_edge = if has_top { ydst[yoff - ystride + 15] } else { 0x80 };
877 ipred_ctx_y.has_left = has_left || x > 0;
878 let bmode = self.ymodes[iidx + x];
879 let cur_yoff = yoff + x * 4;
// NOTE(review): the top-right availability test compares mb_y against
// mb_w (`mb_y < self.mb_w - 1`) — looks like it should involve the
// horizontal position; confirm against the full source before changing.
880 let has_tr = ipred_ctx_y.has_top && ((x < 3) || ((y == 0) && (mb_y < self.mb_w - 1)));
881 let has_dl = ipred_ctx_y.has_left && (x == 0) && (y < 3);
882 ipred_ctx_y.fill(ydst, cur_yoff, ystride,
883 if has_tr { 8 } else { 4 },
884 if has_dl { 8 } else { 4 });
887 ipred_ctx_y.top[i + 4] = tr_save[x * 4 + i];
891 tr_save[x * 4 + i] = ipred_ctx_y.top[i + 4];
894 if (mb_x == self.mb_w - 1) && has_top && (x == 3) {
896 ipred_ctx_y.top[i + 4] = tr_edge;
900 PredMode::DCPred => IPred4x4::ipred_dc(ydst, cur_yoff, ystride, ipred_ctx_y),
901 PredMode::TMPred => IPred4x4::ipred_tm(ydst, cur_yoff, ystride, ipred_ctx_y),
902 PredMode::HPred => IPred4x4::ipred_he(ydst, cur_yoff, ystride, ipred_ctx_y),
903 PredMode::VPred => IPred4x4::ipred_ve(ydst, cur_yoff, ystride, ipred_ctx_y),
904 PredMode::LDPred => IPred4x4::ipred_ld(ydst, cur_yoff, ystride, ipred_ctx_y),
905 PredMode::RDPred => IPred4x4::ipred_rd(ydst, cur_yoff, ystride, ipred_ctx_y),
906 PredMode::VRPred => IPred4x4::ipred_vr(ydst, cur_yoff, ystride, ipred_ctx_y),
907 PredMode::VLPred => IPred4x4::ipred_vl(ydst, cur_yoff, ystride, ipred_ctx_y),
908 PredMode::HDPred => IPred4x4::ipred_hd(ydst, cur_yoff, ystride, ipred_ctx_y),
909 PredMode::HUPred => IPred4x4::ipred_hu(ydst, cur_yoff, ystride, ipred_ctx_y),
// B-mode adds residue inline per subblock (not via add_residue).
912 add_coeffs4x4(ydst, cur_yoff, ystride, &self.coeffs[x + y * 4]);
914 ipred_ctx_y.has_top = true;
916 iidx += self.ymode_stride;
// Chroma 8x8 prediction for both planes with the shared uvmode.
919 let dst = &mut dframe.data[0..];
920 let uoff = dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1];
921 let ustride = dframe.stride[1];
922 let voff = dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2];
923 let vstride = dframe.stride[2];
924 let ipred_ctx_u = &mut self.dstate.ipred_ctx_u;
925 let ipred_ctx_v = &mut self.dstate.ipred_ctx_v;
926 ipred_ctx_u.has_top = has_top;
927 ipred_ctx_v.has_top = has_top;
928 ipred_ctx_u.has_left = has_left;
929 ipred_ctx_v.has_left = has_left;
930 ipred_ctx_u.fill(dst, uoff, ustride, 8, 8);
931 ipred_ctx_v.fill(dst, voff, vstride, 8, 8);
932 match self.mb_info[mb_idx].uvmode {
933 PredMode::DCPred => {
934 IPred8x8::ipred_dc(dst, uoff, ustride, ipred_ctx_u);
935 IPred8x8::ipred_dc(dst, voff, vstride, ipred_ctx_v);
938 IPred8x8::ipred_h(dst, uoff, ustride, ipred_ctx_u);
939 IPred8x8::ipred_h(dst, voff, vstride, ipred_ctx_v);
942 IPred8x8::ipred_v(dst, uoff, ustride, ipred_ctx_u);
943 IPred8x8::ipred_v(dst, voff, vstride, ipred_ctx_v);
945 PredMode::TMPred => {
946 IPred8x8::ipred_tm(dst, uoff, ustride, ipred_ctx_u);
947 IPred8x8::ipred_tm(dst, voff, vstride, ipred_ctx_v);
// For B-mode (is_normal == false) luma residue was already added above.
951 self.add_residue(dframe, mb_x, mb_y, is_normal, pitch_mode);
// Reconstructs one inter macroblock: motion compensation from the last
// or golden reference (16x16 for a single MV, 4x4 per subblock for
// four-MV mode), chroma MC with averaged/derived MVs, then residue.
// pitch_smode != 0 selects the special interlaced MC variants.
// NOTE(review): many lines of this function are not visible in this view.
954 fn recon_inter_mb(&mut self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, use_last: bool) {
955 let pitch = self.dstate.force_pitch.unwrap_or(0);
956 let pitch_dmode = (pitch >> 3) & 3;
957 let pitch_smode = pitch & 7;
959 let refframe = (if use_last { self.shuf.get_last() } else { self.shuf.get_golden() }).unwrap();
960 let single_mv = self.mb_info[mb_x + mb_y * self.mb_w].mb_type != VPMBType::InterFourMV;
961 let mut iidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
962 let mut mc_buf = self.mc_buf.get_data_mut().unwrap();
964 let dst = &mut dframe.data[0..];
965 let ystride = dframe.stride[0];
966 let mut yoff = dframe.offset[0] + mb_x * 16 + mb_y * 16 * ystride;
// Luma MVs are stored at half-pel precision; MC takes quarter-pel (*2).
967 if pitch_smode == 0 {
969 mc_block16x16(dst, yoff, ystride, mb_x * 16, mb_y * 16,
970 self.mvs[iidx].x * 2, self.mvs[iidx].y * 2, refframe.clone(), 0, &mut mc_buf);
974 mc_block4x4(dst, yoff + x * 4, ystride, mb_x * 16 + x * 4, mb_y * 16 + y * 4,
975 self.mvs[iidx + x].x * 2, self.mvs[iidx + x].y * 2, refframe.clone(), 0, &mut mc_buf);
978 iidx += self.mv_stride;
983 mc_block_special(dst, yoff, ystride, mb_x * 16, mb_y * 16,
984 self.mvs[iidx].x * 2, self.mvs[iidx].y * 2,
985 refframe.clone(), 0, &mut mc_buf, 16, pitch_smode);
989 mc_block_special(dst, yoff + x * 4, ystride,
990 mb_x * 16 + x * 4, mb_y * 16 + y * 4,
991 self.mvs[iidx + x].x * 2, self.mvs[iidx + x].y * 2,
992 refframe.clone(), 0, &mut mc_buf, 4, pitch_smode);
995 iidx += self.mv_stride;
// Chroma: single-MV MBs reuse the luma MV directly on 8x8 blocks.
1000 let mut iidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
1001 let mut uoff = dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1];
1002 let ustride = dframe.stride[1];
1003 let mut voff = dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2];
1004 let vstride = dframe.stride[2];
1006 let chroma_mv = self.mvs[iidx];
1008 if pitch_smode == 0 {
1009 mc_block8x8(dst, uoff, ustride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf);
1010 mc_block8x8(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf);
1012 mc_block_special(dst, uoff, ustride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y,
1013 refframe.clone(), 1, &mut mc_buf, 8, pitch_smode);
1014 mc_block_special(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y,
1015 refframe.clone(), 2, &mut mc_buf, 8, pitch_smode);
// Four-MV mode: chroma MV is the sum (later averaged/rounded in code
// not visible here) of the four covering luma subblock MVs.
1020 let mut chroma_mv = self.mvs[iidx + x * 2] + self.mvs[iidx + x * 2 + 1]
1021 + self.mvs[iidx + x * 2 + self.mv_stride]
1022 + self.mvs[iidx + x * 2 + self.mv_stride + 1];
1023 if chroma_mv.x < 0 {
1028 if chroma_mv.y < 0 {
1036 if pitch_smode == 0 {
1037 mc_block4x4(dst, uoff + x * 4, ustride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1038 chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf);
1039 mc_block4x4(dst, voff + x * 4, vstride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1040 chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf);
1042 mc_block_special(dst, uoff + x * 4, ustride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1043 chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf,
1045 mc_block_special(dst, voff + x * 4, vstride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1046 chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf,
1050 uoff += ustride * 4;
1051 voff += vstride * 4;
1052 iidx += 2 * self.mv_stride;
1055 self.add_residue(dframe, mb_x, mb_y, true, pitch_dmode);
// Applies the in-loop deblocking filter to one macroblock: MB-edge
// filtering (left and top) followed by inner-block edges, on all
// three planes. Thresholds derive from the loop strength and
// sharpness; the simple filter is used when lf_simple is set.
// NOTE(review): some lines of this function are not visible in this view.
1057 fn loop_filter_mb(&mut self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, loop_str: u8) {
// High-edge-variance threshold, [0] for inter frames, [1] for intra,
// indexed by loop strength.
1058 const HIGH_EDGE_VAR_THR: [[u8; 64]; 2] = [
1060 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1061 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
1062 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
1063 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3
1065 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1066 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1067 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2,
1068 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2
1071 let edge_thr = (loop_str as i16) + 2;
1072 let luma_thr = loop_str as i16;
1073 let chroma_thr = (loop_str as i16) * 2;
// Sharpness clamps and shifts the inner-edge threshold.
1074 let inner_thr = if self.dstate.loop_sharpness == 0 {
1077 let bound1 = (9 - self.dstate.loop_sharpness) as i16;
1078 let shift = (self.dstate.loop_sharpness + 3) >> 2;
1079 ((loop_str as i16) >> shift).min(bound1)
1081 let hev_thr = HIGH_EDGE_VAR_THR[if self.dstate.is_intra { 1 } else { 0 }][loop_str as usize] as i16;
1083 let ystride = dframe.stride[0];
1084 let ustride = dframe.stride[1];
1085 let vstride = dframe.stride[2];
1086 let ypos = dframe.offset[0] + mb_x * 16 + mb_y * 16 * ystride;
1087 let upos = dframe.offset[1] + mb_x * 8 + mb_y * 8 * ustride;
1088 let vpos = dframe.offset[2] + mb_x * 8 + mb_y * 8 * vstride;
1090 let (loop_edge, loop_inner) = if self.dstate.lf_simple {
1091 (simple_loop_filter as LoopFilterFunc, simple_loop_filter as LoopFilterFunc)
1093 (normal_loop_filter_edge as LoopFilterFunc, normal_loop_filter_inner as LoopFilterFunc)
// MB edges: step 1 = vertical edge (left), step = stride -> horizontal.
1097 loop_edge(dframe.data, ypos, 1, ystride, 16, edge_thr, inner_thr, hev_thr);
1098 loop_edge(dframe.data, upos, 1, ustride, 8, edge_thr, inner_thr, hev_thr);
1099 loop_edge(dframe.data, vpos, 1, vstride, 8, edge_thr, inner_thr, hev_thr);
1102 loop_edge(dframe.data, ypos, ystride, 1, 16, edge_thr, inner_thr, hev_thr);
1103 loop_edge(dframe.data, upos, ustride, 1, 8, edge_thr, inner_thr, hev_thr);
1104 loop_edge(dframe.data, vpos, vstride, 1, 8, edge_thr, inner_thr, hev_thr);
// Inner 4-pixel-spaced edges within the macroblock.
1108 loop_inner(dframe.data, ypos + y * 4 * ystride, ystride, 1, 16, luma_thr, inner_thr, hev_thr);
1110 loop_inner(dframe.data, upos + 4 * ustride, ustride, 1, 8, chroma_thr, inner_thr, hev_thr);
1111 loop_inner(dframe.data, vpos + 4 * vstride, vstride, 1, 8, chroma_thr, inner_thr, hev_thr);
1114 loop_inner(dframe.data, ypos + x * 4, 1, ystride, 16, luma_thr, inner_thr, hev_thr);
1116 loop_inner(dframe.data, upos + 4, 1, ustride, 8, chroma_thr, inner_thr, hev_thr);
1117 loop_inner(dframe.data, vpos + 4, 1, vstride, 8, chroma_thr, inner_thr, hev_thr);
1121 impl NADecoder for VP7Decoder {
// Decoder initialization: forces YUV420 output, preallocates the
// 4-frame buffer pool and sizes the internal state from the container
// video info; rejects non-video codec info.
// NOTE(review): some lines of this function are not visible in this view.
1122 fn init(&mut self, supp: &mut NADecoderSupport, info: NACodecInfoRef) -> DecoderResult<()> {
1123 if let NACodecTypeInfo::Video(vinfo) = info.get_properties() {
1124 let fmt = YUV420_FORMAT;
1125 let myvinfo = NAVideoInfo::new(vinfo.get_width(), vinfo.get_height(), false, fmt);
1126 let myinfo = NACodecTypeInfo::Video(myvinfo.clone());
1127 self.info = NACodecInfo::new_ref(info.get_name(), myinfo, info.get_extradata()).into_ref();
1129 supp.pool_u8.set_dec_bufs(4);
1130 supp.pool_u8.prealloc_video(NAVideoInfo::new(myvinfo.get_width(), myvinfo.get_height(), false, vinfo.get_format()), 4)?;
1131 self.set_dimensions(myvinfo.get_width(), myvinfo.get_height());
1134 Err(DecoderError::InvalidData)
1137 fn decode(&mut self, supp: &mut NADecoderSupport, pkt: &NAPacket) -> DecoderResult<NAFrameRef> {
1138 let src = pkt.get_buffer();
1140 validate!(src.len() > 4);
1142 let frame_tag = read_u24le(src.as_slice())?;
1143 self.dstate.is_intra = (frame_tag & 1) == 0;
1144 self.dstate.version = ((frame_tag >> 1) & 7) as u8;
1145 let part2_off = (frame_tag >> 4) as usize;
1146 let part1_off = if self.dstate.version == 0 { 4 } else { 3 };
1148 validate!(src.len() > part1_off + part2_off);
1149 let mut bc = BoolCoder::new(&src[part1_off..][..part2_off])?;
1150 let mut bc_main = BoolCoder::new(&src[part1_off + part2_off..])?;
1151 if self.dstate.is_intra {
1152 let width = bc.read_bits(12) as usize;
1153 let height = bc.read_bits(12) as usize;
1154 let _scalev = bc.read_bits(2);
1155 let _scaleh = bc.read_bits(2);
1156 validate!((width > 0) && (height > 0));
1157 self.set_dimensions(width, height);
1159 self.dstate.reset();
1161 if !self.shuf.has_refs() {
1162 return Err(DecoderError::MissingReference);
1166 self.read_features(&mut bc)?;
1168 let y_ac_q = bc.read_bits(7) as usize;
1169 let y_dc_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1170 let y2_dc_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1171 let y2_ac_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1172 let uv_dc_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1173 let uv_ac_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1174 self.set_qmat(y_dc_q, y_ac_q, y2_dc_q, y2_ac_q, uv_dc_q, uv_ac_q);
1176 let update_gf = if self.dstate.is_intra { true } else { bc.read_bool() };
1178 let mut has_fading_feature = true;
1179 let mut keep_probs = true;
1180 if self.dstate.version != 0 {
1181 keep_probs = bc.read_bool();
1182 if self.dstate.is_intra {
1183 has_fading_feature = true;
1185 has_fading_feature = bc.read_bool();
1189 if has_fading_feature {
1190 self.dstate.fading = bc.read_bool();
1191 if self.dstate.fading {
1192 self.dstate.fade_alpha = bc.read_sbits(8) as u16;
1193 self.dstate.fade_beta = bc.read_sbits(8) as u16;
1194 if let Some(pframe) = self.shuf.get_last() {
1195 let mut fframe = supp.pool_u8.get_free().unwrap();
1196 let mut dframe = NASimpleVideoFrame::from_video_buf(&mut fframe).unwrap();
1197 fade_frame(pframe, &mut dframe, self.dstate.fade_alpha, self.dstate.fade_beta);
1198 self.shuf.add_frame(fframe);
1202 self.dstate.fading = false;
1205 if self.dstate.version == 0 {
1206 self.dstate.lf_simple = bc.read_bool();
1211 self.scan[i] = DEFAULT_SCAN_ORDER[bc.read_bits(4) as usize];
1215 if self.dstate.version != 0 {
1216 self.dstate.lf_simple = bc.read_bool();
1218 self.dstate.lf_simple = false;
1221 self.dstate.loop_filter_level = bc.read_bits(6) as u8;
1222 self.dstate.loop_sharpness = bc.read_bits(3) as u8;
1224 self.read_dct_coef_prob_upd(&mut bc)?;
1226 if !self.dstate.is_intra {
1227 self.dstate.prob_intra_pred = bc.read_byte();
1228 self.dstate.prob_last_pred = bc.read_byte();
1231 self.dstate.kf_ymode_prob[i] = bc.read_byte();
1236 self.dstate.kf_uvmode_prob[i] = bc.read_byte();
1239 self.read_mv_prob_upd(&mut bc)?;
1242 self.tmp_scan.copy_from_slice(&self.scan);
1245 let vinfo = NAVideoInfo::new(self.width, self.height, false, YUV420_FORMAT);
1246 let ret = supp.pool_u8.get_free();
1248 return Err(DecoderError::AllocError);
1250 let mut buf = ret.unwrap();
1251 if buf.get_info() != vinfo {
1253 supp.pool_u8.reset();
1254 supp.pool_u8.prealloc_video(vinfo, 4)?;
1255 let ret = supp.pool_u8.get_free();
1257 return Err(DecoderError::AllocError);
1261 let mut dframe = NASimpleVideoFrame::from_video_buf(&mut buf).unwrap();
1264 self.pcache.reset();
1265 if self.dstate.is_intra || (self.dstate.version > 0) {
1266 self.dstate.pdc_pred_val = 0;
1267 self.dstate.pdc_pred_count = 0;
1269 let mut use_last = true;
1270 for mb_y in 0..self.mb_h {
1271 for mb_x in 0..self.mb_w {
1272 self.decode_mb_features(&mut bc, mb_x, mb_y)?;
1273 self.dstate.has_y2 = true;
1274 if self.dstate.is_intra {
1275 let ymode = bc.read_tree(KF_Y_MODE_TREE, KF_Y_MODE_TREE_PROBS);
1276 if ymode == PredMode::BPred {
1277 self.dstate.has_y2 = false;
1278 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
1281 let top_mode = if (y > 0) || (mb_y > 0) {
1282 self.ymodes[iidx + x - self.ymode_stride]
1286 let left_mode = if (x > 0) || (mb_x > 0) {
1287 self.ymodes[iidx + x - 1]
1291 let top_idx = top_mode.to_b_index();
1292 let left_idx = left_mode.to_b_index();
1293 let bmode = bc.read_tree(B_MODE_TREE, &KF_B_MODE_TREE_PROBS[top_idx][left_idx]);
1294 self.ymodes[iidx + x] = bmode;
1296 iidx += self.ymode_stride;
1299 self.fill_ymode(mb_x, mb_y, ymode.to_b_mode());
1301 let uvmode = bc.read_tree(UV_MODE_TREE, KF_UV_MODE_TREE_PROBS);
1302 self.mb_info[mb_idx].mb_type = VPMBType::Intra;
1303 self.mb_info[mb_idx].ymode = ymode;
1304 self.mb_info[mb_idx].uvmode = uvmode;
1305 } else if !bc.read_prob(self.dstate.prob_intra_pred) {
1306 let ymode = bc.read_tree(Y_MODE_TREE, &self.dstate.kf_ymode_prob);
1307 if ymode == PredMode::BPred {
1308 self.dstate.has_y2 = false;
1309 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
1312 let bmode = bc.read_tree(B_MODE_TREE, B_MODE_TREE_PROBS);
1313 self.ymodes[iidx + x] = bmode;
1315 iidx += self.ymode_stride;
1318 self.fill_ymode(mb_x, mb_y, PredMode::Inter);
1320 let uvmode = bc.read_tree(UV_MODE_TREE, &self.dstate.kf_uvmode_prob);
1321 self.mb_info[mb_idx].mb_type = VPMBType::Intra;
1322 self.mb_info[mb_idx].ymode = ymode;
1323 self.mb_info[mb_idx].uvmode = uvmode;
1324 self.fill_mv(mb_x, mb_y, ZERO_MV);
1326 use_last = !bc.read_prob(self.dstate.prob_last_pred);
1328 let (mvprobs, nearest_mv, near_mv, pred_mv) = self.find_mv_pred(mb_x, mb_y);
1329 let mbtype = bc.read_tree(MV_REF_TREE, &mvprobs);
1332 VPMBType::InterNearest => {
1333 self.fill_mv(mb_x, mb_y, nearest_mv);
1335 VPMBType::InterNear => {
1336 self.fill_mv(mb_x, mb_y, near_mv);
1338 VPMBType::InterNoMV => {
1339 self.fill_mv(mb_x, mb_y, ZERO_MV);
1341 VPMBType::InterMV => {
1342 let dmy = decode_mv_component(&mut bc, &self.dstate.mv_probs[0]);
1343 let dmx = decode_mv_component(&mut bc, &self.dstate.mv_probs[1]);
1344 let new_mv = pred_mv + MV{ x: dmx, y: dmy };
1345 self.fill_mv(mb_x, mb_y, new_mv);
1347 VPMBType::InterFourMV => {
1348 self.do_split_mv(&mut bc, mb_x, mb_y, pred_mv)?;
1350 _ => unreachable!(),
1353 self.fill_ymode(mb_x, mb_y, PredMode::Inter);
1354 self.mb_info[mb_idx].mb_type = mbtype;
1355 self.mb_info[mb_idx].ymode = PredMode::Inter;
1356 self.mb_info[mb_idx].uvmode = PredMode::Inter;
1358 self.decode_residue(&mut bc_main, mb_x, mb_idx);
1359 match self.mb_info[mb_idx].mb_type {
1360 VPMBType::Intra => {
1361 self.recon_intra_mb(&mut dframe, mb_x, mb_y)?;
1364 self.recon_inter_mb(&mut dframe, mb_x, mb_y, use_last);
1367 if let Some(loop_str) = self.dstate.force_loop_str {
1368 self.mb_info[mb_idx].loop_str = loop_str;
1370 self.mb_info[mb_idx].loop_str = self.dstate.loop_filter_level;
1372 self.mb_info[mb_idx].upd_gf = self.dstate.force_gf_update;
1375 self.pcache.update_row();
1378 for mb_y in 0..self.mb_h {
1379 for mb_x in 0..self.mb_w {
1380 let loop_str = self.mb_info[mb_idx].loop_str;
1381 self.loop_filter_mb(&mut dframe, mb_x, mb_y, loop_str);
1385 if !update_gf && self.dstate.features[2].is_some() {
1386 let gf = self.shuf.get_golden().unwrap();
1387 let mut new_gf = supp.pool_u8.get_copy(&gf).unwrap();
1388 let dframe = NASimpleVideoFrame::from_video_buf(&mut new_gf).unwrap();
1390 let mut mc_buf = self.mc_buf.get_data_mut().unwrap();
1391 for mb_y in 0..self.mb_h {
1392 for mb_x in 0..self.mb_w {
1393 if self.mb_info[mb_idx].upd_gf {
1394 mc_block16x16(dframe.data, dframe.offset[0] + mb_x * 16 + mb_y * 16 * dframe.stride[0], dframe.stride[0], mb_x * 16, mb_y * 16, 0, 0, buf.clone(), 0, &mut mc_buf);
1395 mc_block8x8(dframe.data, dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1], dframe.stride[1], mb_x * 8, mb_y * 8, 0, 0, buf.clone(), 1, &mut mc_buf);
1396 mc_block8x8(dframe.data, dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2], dframe.stride[2], mb_x * 8, mb_y * 8, 0, 0, buf.clone(), 2, &mut mc_buf);
1401 self.shuf.add_golden_frame(new_gf);
1405 self.scan.copy_from_slice(&self.tmp_scan);
1408 self.shuf.add_golden_frame(buf.clone());
1410 self.shuf.add_frame(buf.clone());
1412 let mut frm = NAFrame::new_from_pkt(pkt, self.info.clone(), NABufferType::Video(buf));
1413 frm.set_keyframe(self.dstate.is_intra);
1414 frm.set_frame_type(if self.dstate.is_intra { FrameType::I } else { FrameType::P });
1417 fn flush(&mut self) {
1422 pub fn get_decoder() -> Box<NADecoder + Send> {
1423 Box::new(VP7Decoder::new())
1428 use nihav_core::codecs::RegisteredDecoders;
1429 use nihav_core::demuxers::RegisteredDemuxers;
1430 use nihav_core::test::dec_video::*;
1431 use crate::codecs::duck_register_all_codecs;
1432 use nihav_commonfmt::demuxers::generic_register_all_demuxers;
1436 let mut dmx_reg = RegisteredDemuxers::new();
1437 generic_register_all_demuxers(&mut dmx_reg);
1438 let mut dec_reg = RegisteredDecoders::new();
1439 duck_register_all_codecs(&mut dec_reg);
1441 //let file = "assets/Duck/potter-40.vp7";
1442 //let file = "assets/Duck/potter-500.vp7";
1443 //let file = "assets/Duck/starsky-700.vp7";
1444 //let file = "assets/Duck/taking-700.vp7";
1445 //let file = "assets/Duck/troy-700.vp7";
1446 let file = "assets/Duck/interlaced_blit_pitch.avi";
1447 //let file = "assets/Duck/vp7.avi";
1448 test_file_decoding("avi", file, Some(12), true, false, None/*Some("vp7")*/, &dmx_reg, &dec_reg);
1452 /*const DEFAULT_ZIGZAG: [usize; 16] = [
1458 const DEFAULT_SCAN_ORDER: [usize; 16] = [
1465 const Y_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1466 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Index(2),
1467 VPTreeDef::Index(4), VPTreeDef::Index(6),
1468 VPTreeDef::Value(PredMode::VPred), VPTreeDef::Value(PredMode::HPred),
1469 VPTreeDef::Value(PredMode::TMPred), VPTreeDef::Value(PredMode::BPred),
1471 const KF_Y_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1472 VPTreeDef::Value(PredMode::BPred), VPTreeDef::Index(2),
1473 VPTreeDef::Index(4), VPTreeDef::Index(6),
1474 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Value(PredMode::VPred),
1475 VPTreeDef::Value(PredMode::HPred), VPTreeDef::Value(PredMode::TMPred),
1477 const UV_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1478 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Index(2),
1479 VPTreeDef::Value(PredMode::VPred), VPTreeDef::Index(4),
1480 VPTreeDef::Value(PredMode::HPred), VPTreeDef::Value(PredMode::TMPred)
1482 const B_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1483 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Index(2),
1484 VPTreeDef::Value(PredMode::TMPred), VPTreeDef::Index(4),
1485 VPTreeDef::Value(PredMode::VPred), VPTreeDef::Index(6),
1486 VPTreeDef::Index(8), VPTreeDef::Index(12),
1487 VPTreeDef::Value(PredMode::HPred), VPTreeDef::Index(10),
1488 VPTreeDef::Value(PredMode::RDPred), VPTreeDef::Value(PredMode::VRPred),
1489 VPTreeDef::Value(PredMode::LDPred), VPTreeDef::Index(14),
1490 VPTreeDef::Value(PredMode::VLPred), VPTreeDef::Index(16),
1491 VPTreeDef::Value(PredMode::HDPred), VPTreeDef::Value(PredMode::HUPred)
1494 const FEATURE_TREE: &[VPTreeDef<usize>] = &[
1495 VPTreeDef::Index(2), VPTreeDef::Index(4),
1496 VPTreeDef::Value(0), VPTreeDef::Value(1),
1497 VPTreeDef::Value(2), VPTreeDef::Value(3)
1500 const COEF_TREE: &[VPTreeDef<DCTToken>] = &[
1501 VPTreeDef::Value(DCTToken::EOB), VPTreeDef::Index(2),
1502 VPTreeDef::Value(DCTToken::Zero), VPTreeDef::Index(4),
1503 VPTreeDef::Value(DCTToken::One), VPTreeDef::Index(6),
1504 VPTreeDef::Index(8), VPTreeDef::Index(12),
1505 VPTreeDef::Value(DCTToken::Two), VPTreeDef::Index(10),
1506 VPTreeDef::Value(DCTToken::Three), VPTreeDef::Value(DCTToken::Four),
1507 VPTreeDef::Index(14), VPTreeDef::Index(16),
1508 VPTreeDef::Value(DCTToken::Cat1), VPTreeDef::Value(DCTToken::Cat2),
1509 VPTreeDef::Index(18), VPTreeDef::Index(20),
1510 VPTreeDef::Value(DCTToken::Cat3), VPTreeDef::Value(DCTToken::Cat4),
1511 VPTreeDef::Value(DCTToken::Cat5), VPTreeDef::Value(DCTToken::Cat6)
1514 const MV_REF_TREE: &[VPTreeDef<VPMBType>] = &[
1515 VPTreeDef::Value(VPMBType::InterNoMV), VPTreeDef::Index(2),
1516 VPTreeDef::Value(VPMBType::InterNearest), VPTreeDef::Index(4),
1517 VPTreeDef::Value(VPMBType::InterNear), VPTreeDef::Index(6),
1518 VPTreeDef::Value(VPMBType::InterMV), VPTreeDef::Value(VPMBType::InterFourMV)
1520 const SMALL_MV_TREE: &[VPTreeDef<i16>] = &[
1521 VPTreeDef::Index(2), VPTreeDef::Index(8),
1522 VPTreeDef::Index(4), VPTreeDef::Index(6),
1523 VPTreeDef::Value(0), VPTreeDef::Value(1),
1524 VPTreeDef::Value(2), VPTreeDef::Value(3),
1525 VPTreeDef::Index(10), VPTreeDef::Index(12),
1526 VPTreeDef::Value(4), VPTreeDef::Value(5),
1527 VPTreeDef::Value(6), VPTreeDef::Value(7)
1529 const MV_SPLIT_MODE_TREE: &[VPTreeDef<MVSplitMode>] = &[
1530 VPTreeDef::Value(MVSplitMode::Sixteenths), VPTreeDef::Index(2),
1531 VPTreeDef::Value(MVSplitMode::Quarters), VPTreeDef::Index(4),
1532 VPTreeDef::Value(MVSplitMode::TopBottom), VPTreeDef::Value(MVSplitMode::LeftRight)
1534 const SUB_MV_REF_TREE: &[VPTreeDef<SubMVRef>] = &[
1535 VPTreeDef::Value(SubMVRef::Left), VPTreeDef::Index(2),
1536 VPTreeDef::Value(SubMVRef::Above), VPTreeDef::Index(4),
1537 VPTreeDef::Value(SubMVRef::Zero), VPTreeDef::Value(SubMVRef::New)