vp7: fix intra prediction corner cases
nihav-duck/src/codecs/vp7.rs
1use nihav_core::codecs::*;
2use nihav_core::io::byteio::*;
3use nihav_core::data::GenericCache;
4use super::vpcommon::*;
5use super::vp7data::*;
6use super::vp7dsp::*;
7
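// Probability-tree description for the boolean coder: entries come in pairs sharing
// one probability; `Index` jumps to another pair, `Value` terminates the walk.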
8enum VPTreeDef<T: Copy> {
9 Index(u8),
10 Value(T),
11}
12
13trait VPTreeReader {
14 fn read_tree<T:Copy>(&mut self, tree_def: &[VPTreeDef<T>], tree_prob: &[u8]) -> T;
15}
16
17impl<'a> VPTreeReader for BoolCoder<'a> {
18 fn read_tree<T:Copy>(&mut self, tree_def: &[VPTreeDef<T>], tree_prob: &[u8]) -> T {
19 let mut idx = 0;
20
21 loop {
22 let bit = self.read_prob(tree_prob[idx >> 1]);
23 match tree_def[idx + (bit as usize)] {
24 VPTreeDef::Value(v) => return v,
25 VPTreeDef::Index(ix) => { idx = ix as usize; },
26 };
27 }
28 }
29}
30
31#[repr(u8)]
32#[derive(Clone,Copy,PartialEq,Debug)]
33enum PredMode {
34 DCPred,
35 HPred,
36 VPred,
37 TMPred,
38 BPred,
39
40 //sub-block prediction modes
41 LDPred,
42 RDPred,
43 VRPred,
44 VLPred,
45 HDPred,
46 HUPred,
47
48 Inter,
49}
50
51impl Default for PredMode {
52 fn default() -> Self { PredMode::DCPred }
53}
54
55impl PredMode {
56 fn to_b_mode(self) -> Self {
57 if self == PredMode::DCPred {
58 self
59 } else {
60 PredMode::TMPred
61 }
62 }
63 fn to_b_index(self) -> usize {
64 match self {
65 PredMode::DCPred => 0,
66 PredMode::TMPred => 1,
67 PredMode::VPred => 2,
68 PredMode::HPred => 3,
69 PredMode::LDPred => 4,
70 PredMode::RDPred => 5,
71 PredMode::VRPred => 6,
72 PredMode::VLPred => 7,
73 PredMode::HDPred => 8,
74 PredMode::HUPred => 9,
75 _ => unreachable!(),
76 }
77 }
78}
79
80const PITCH_MODE_NORMAL: u8 = 0;
81const PITCH_MODE_FOUR: u8 = 1;
82const PITCH_MODE_X2: u8 = 2;
83const PITCH_MODE_X4: u8 = 3;
84
85#[derive(Clone,Copy,Default)]
86struct MBFeature {
87 present_prob: u8,
88 tree_probs: [u8; 3],
89 def_val: [u8; 4],
90}
91
92#[derive(Clone,Copy,PartialEq)]
93enum DCTToken {
94 Zero,
95 One,
96 Two,
97 Three,
98 Four,
99 Cat1,
100 Cat2,
101 Cat3,
102 Cat4,
103 Cat5,
104 Cat6,
105 EOB,
106}
107
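// Expands a decoded DCT token into a signed coefficient: One..Four map directly to
// +/-1..+/-4, Cat1..Cat6 read extra magnitude bits with per-category probabilities and
// add the category base, then a sign bit is applied.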
108fn expand_token(bc: &mut BoolCoder, token: DCTToken) -> i16 {
109 let cat;
110 match token {
111 DCTToken::Zero => return 0,
112 DCTToken::One => return if bc.read_bool() { -1 } else { 1 },
113 DCTToken::Two => return if bc.read_bool() { -2 } else { 2 },
114 DCTToken::Three => return if bc.read_bool() { -3 } else { 3 },
115 DCTToken::Four => return if bc.read_bool() { -4 } else { 4 },
116 DCTToken::Cat1 => cat = 0,
117 DCTToken::Cat2 => cat = 1,
118 DCTToken::Cat3 => cat = 2,
119 DCTToken::Cat4 => cat = 3,
120 DCTToken::Cat5 => cat = 4,
121 DCTToken::Cat6 => cat = 5,
122 _ => unreachable!(),
123 };
124 let mut add = 0i16;
125 let add_probs = &VP56_COEF_ADD_PROBS[cat];
126 for prob in add_probs.iter() {
127 if *prob == 128 { break; }
128 add = (add << 1) | (bc.read_prob(*prob) as i16);
129 }
130 let sign = bc.read_bool();
131 let level = VP56_COEF_BASE[cat] + add;
132 if !sign {
133 level
134 } else {
135 -level
136 }
137}
138
139struct SBParams<'a> {
140 coef_probs: &'a [[[[u8; 11]; 3]; 8]; 4],
141 scan: &'a [usize; 16],
142 qmat: &'a [i16; 16],
143}
144
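// Decodes one 4x4 coefficient block: token decoding starts from the left/above
// non-zero context in `pctx`, coefficients are dequantised into the scan order
// (the DC is skipped for luma blocks whose DC lives in the Y2 block), and the
// returned 0/1 flag feeds the neighbouring blocks' context.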
145fn decode_subblock<'a>(bc: &mut BoolCoder, coeffs: &mut [i16; 16], ctype: usize, pctx: u8, sbparams: &SBParams) -> u8 {
146 const COEF_BANDS: [usize; 16] = [ 0, 1, 2, 3, 6, 4, 5, 6, 6, 6, 6, 6, 6, 6, 6, 7 ];
147
148 let mut has_nz = 0;
149 let start = if ctype != 0 { 0 } else { 1 };
150 *coeffs = [0; 16];
151 let mut cval = pctx as usize;
152 for idx in start..16 {
153 let probs = &sbparams.coef_probs[ctype][COEF_BANDS[idx]][cval];
154 let tok = bc.read_tree(COEF_TREE, probs);
155 if tok == DCTToken::EOB { break; }
156 let level = expand_token(bc, tok);
157 coeffs[sbparams.scan[idx]] = level.wrapping_mul(sbparams.qmat[idx]);
158 cval = level.abs().min(2) as usize;
159 has_nz |= cval;
160 }
161 if has_nz > 0 { 1 } else { 0 }
162}
163
164#[derive(Clone,Copy,Default)]
165struct MBInfo {
166 mb_type: VPMBType,
167 ymode: PredMode,
168 uvmode: PredMode,
169 loop_str: u8,
170 upd_gf: bool,
171}
172
173#[derive(Default)]
174struct DecoderState {
175 features: [Option<MBFeature>; 4],
176
177 fading: bool,
178 fade_alpha: u16,
179 fade_beta: u16,
180
181 lf_simple: bool,
182 loop_filter_level: u8,
183 loop_sharpness: u8,
184
185 is_intra: bool,
186 version: u8,
187
188 kf_ymode_prob: [u8; 4],
189 kf_uvmode_prob: [u8; 3],
190
191 prob_intra_pred: u8,
192 prob_last_pred: u8,
193
194 coef_probs: [[[[u8; 11]; 3]; 8]; 4],
195 mv_probs: [[u8; 17]; 2],
196
197 force_quant: Option<u8>,
198 force_loop_str: Option<u8>,
199 force_gf_update: bool,
200 force_pitch: Option<u8>,
201
202 has_y2: bool,
203 pdc_pred_val: i16,
204 pdc_pred_count: usize,
205
206 ipred_ctx_y: IPredContext,
207 ipred_ctx_u: IPredContext,
208 ipred_ctx_v: IPredContext,
209}
210
211impl DecoderState {
212 fn reset(&mut self) {
213 self.kf_ymode_prob.copy_from_slice(Y_MODE_TREE_PROBS);
214 self.kf_uvmode_prob.copy_from_slice(UV_MODE_TREE_PROBS);
215 self.coef_probs.copy_from_slice(&DEFAULT_DCT_PROBS);
216 self.mv_probs.copy_from_slice(&DEFAULT_MV_PROBS);
217 }
218}
219
220#[derive(Clone,Copy,Debug,PartialEq)]
221enum MVSplitMode {
222 TopBottom,
223 LeftRight,
224 Quarters,
225 Sixteenths,
226}
227
228#[derive(Clone,Copy,Debug,PartialEq)]
229enum SubMVRef {
230 Left,
231 Above,
232 New,
233 Zero,
234}
235
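// Decodes one motion vector component: either a small magnitude from a tree or a
// raw long vector (bit 3 is only coded when any of bits 4..7 are set), followed by
// a sign when the value is non-zero.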
236fn decode_mv_component(bc: &mut BoolCoder, probs: &[u8; 17]) -> i16 {
237 const LONG_VECTOR_ORDER: [usize; 7] = [ 0, 1, 2, 7, 6, 5, 4 ];
238
239 let val = if !bc.read_prob(probs[0]) {
240 bc.read_tree(SMALL_MV_TREE, &probs[2..9])
241 } else {
242 let raw_probs = &probs[9..];
243 let mut raw = 0;
244 for ord in LONG_VECTOR_ORDER.iter() {
245 raw |= (bc.read_prob(raw_probs[*ord]) as i16) << *ord;
246 }
247 if (raw & 0xF0) != 0 {
248 raw |= (bc.read_prob(raw_probs[3]) as i16) << 3;
249 } else {
250 raw |= 1 << 3;
251 }
252 raw
253 };
254 if (val == 0) || !bc.read_prob(probs[1]) {
255 val
256 } else {
257 -val
258 }
259}
260
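// Caches the per-subblock "has non-zero coefficients" flags for the row above and
// to the left of the current position; they form the context for coefficient decoding.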
261struct PredCache {
262 y_pred: GenericCache<u8>,
263 u_pred: GenericCache<u8>,
264 v_pred: GenericCache<u8>,
265 y2_pred: GenericCache<u8>,
266 y_pred_left: [u8; 4],
267 u_pred_left: [u8; 2],
268 v_pred_left: [u8; 2],
269 y2_pred_left: u8,
270}
271
272impl PredCache {
273 fn new() -> Self {
274 Self {
275 y_pred: GenericCache::new(1, 1, 0),
276 u_pred: GenericCache::new(1, 1, 0),
277 v_pred: GenericCache::new(1, 1, 0),
278 y2_pred: GenericCache::new(1, 1, 0),
279 y_pred_left: [0; 4],
280 u_pred_left: [0; 2],
281 v_pred_left: [0; 2],
282 y2_pred_left: 0,
283 }
284 }
285 fn resize(&mut self, mb_w: usize) {
286 self.y_pred = GenericCache::new(4, mb_w * 4 + 1, 0);
287 self.u_pred = GenericCache::new(2, mb_w * 2 + 1, 0);
288 self.v_pred = GenericCache::new(2, mb_w * 2 + 1, 0);
289 self.y2_pred = GenericCache::new(1, mb_w + 1, 0);
290 }
291 fn reset(&mut self) {
292 self.y_pred.reset();
293 self.u_pred.reset();
294 self.v_pred.reset();
295 self.y2_pred.reset();
296 self.y_pred_left = [0; 4];
297 self.u_pred_left = [0; 2];
298 self.v_pred_left = [0; 2];
299 self.y2_pred_left = 0;
300 }
301 fn update_row(&mut self) {
302 self.y_pred.update_row();
303 self.u_pred.update_row();
304 self.v_pred.update_row();
305 self.y2_pred.update_row();
306 }
307}
308
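// Top-level decoder state: frame geometry, per-macroblock modes and motion vectors,
// the prediction cache, dequantisation matrices, scan order and a scratch buffer
// for motion compensation.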
309struct VP7Decoder {
310 info: NACodecInfoRef,
311
312 shuf: VPShuffler,
313 width: usize,
314 height: usize,
315 mb_w: usize,
316 mb_h: usize,
317 mb_info: Vec<MBInfo>,
318 mvs: Vec<MV>,
319 mv_stride: usize,
320
321 ymodes: Vec<PredMode>,
322 ymode_stride: usize,
323 uvmodes: Vec<PredMode>,
324 uvmode_stride: usize,
325
326 dstate: DecoderState,
327 pcache: PredCache,
328
329 coeffs: [[i16; 16]; 25],
330 scan: [usize; 16],
331 qmat: [[[i16; 16]; 3]; 5],
332
333 mc_buf: NAVideoBufferRef<u8>,
334
335 tmp_scan: [usize; 16],
336}
337
338impl VP7Decoder {
339 fn new() -> Self {
340 let vt = alloc_video_buffer(NAVideoInfo::new(128, 128, false, YUV420_FORMAT), 4).unwrap();
341 let mut scan = [0; 16];
342 scan.copy_from_slice(&DEFAULT_SCAN_ORDER);
343 let mc_buf = vt.get_vbuf().unwrap();
344 Self {
345 info: NACodecInfoRef::default(),
346
347 shuf: VPShuffler::new(),
348 width: 0,
349 height: 0,
350 mb_w: 0,
351 mb_h: 0,
352 mb_info: Vec::new(),
353 mvs: Vec::new(),
354 mv_stride: 0,
355
356 ymodes: Vec::new(),
357 ymode_stride: 0,
358 uvmodes: Vec::new(),
359 uvmode_stride: 0,
360
361 dstate: DecoderState::default(),
362 pcache: PredCache::new(),
363
364 coeffs: [[0; 16]; 25],
365 scan,
366 tmp_scan: [0; 16],
367 qmat: [[[0; 16]; 3]; 5],
368
369 mc_buf,
370 }
371 }
372 fn set_dimensions(&mut self, width: usize, height: usize) {
373 if (width == self.width) && (height == self.height) {
374 return;
375 }
376 self.width = width;
377 self.height = height;
378 self.mb_w = (self.width + 15) >> 4;
379 self.mb_h = (self.height + 15) >> 4;
380 self.mb_info.resize(self.mb_w * self.mb_h, MBInfo::default());
381 self.mv_stride = self.mb_w * 4;
382 self.mvs.resize(self.mv_stride * self.mb_h * 4, ZERO_MV);
383
384 self.ymode_stride = self.mb_w * 4;
385 self.uvmode_stride = self.mb_w;
386 self.ymodes.resize(self.ymode_stride * self.mb_h * 4, PredMode::default());
387 self.uvmodes.resize(self.uvmode_stride * self.mb_h, PredMode::default());
388
389 self.pcache.resize(self.mb_w);
390 }
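// Parses the four optional macroblock features from the frame header: per-MB
// quantiser, loop-filter strength, golden-frame update and pitch mode, each with
// a presence probability, tree probabilities and default values where applicable.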
391 fn read_features(&mut self, bc: &mut BoolCoder) -> DecoderResult<()> {
392 for (i, feat) in self.dstate.features.iter_mut().enumerate() {
393 if bc.read_bool() {
394 let mut feature = MBFeature::default();
395 feature.present_prob = bc.read_byte();
396 for tp in feature.tree_probs.iter_mut() {
397 if bc.read_bool() {
398 *tp = bc.read_byte();
399 } else {
400 *tp = 255;
401 }
402 }
403 if i != 2 {
404 let fbits = match i {
405 0 => 7,
406 1 => 6,
407 _ => if self.dstate.version == 0 { 8 } else { 5 },
408 };
409 for dval in feature.def_val.iter_mut() {
410 if bc.read_bool() {
411 *dval = bc.read_bits(fbits) as u8;
412 } else {
413 *dval = 0;
414 }
415 }
416 }
417 *feat = Some(feature);
418 } else {
419 *feat = None;
420 }
421 }
422 Ok(())
423 }
424 fn read_dct_coef_prob_upd(&mut self, bc: &mut BoolCoder) -> DecoderResult<()> {
425 for i in 0..4 {
426 for j in 0..8 {
427 for k in 0..3 {
428 for l in 0..11 {
429 if bc.read_prob(DCT_UPDATE_PROBS[i][j][k][l]) {
430 self.dstate.coef_probs[i][j][k][l] = bc.read_byte();
431 }
432 }
433 }
434 }
435 }
436 Ok(())
437 }
438 fn read_mv_prob_upd(&mut self, bc: &mut BoolCoder) -> DecoderResult<()> {
439 for comp in 0..2 {
440 for i in 0..17 {
441 if bc.read_prob(MV_UPDATE_PROBS[comp][i]) {
442 self.dstate.mv_probs[comp][i] = bc.read_probability();
443 }
444 }
445 }
446 Ok(())
447 }
448 fn decode_mb_features(&mut self, bc: &mut BoolCoder, _mb_x: usize, _mb_y: usize) -> DecoderResult<()> {
449 self.dstate.force_quant = None;
450 self.dstate.force_loop_str = None;
451 self.dstate.force_gf_update = false;
452 self.dstate.force_pitch = None;
453 for (i, feat) in self.dstate.features.iter().enumerate() {
454 if let Some(feat) = feat {
455 let present = bc.read_prob(feat.present_prob);
456 if present {
457 let ftype_idx = bc.read_tree(FEATURE_TREE, &feat.tree_probs);
458 let val = feat.def_val[ftype_idx];
459 match i {
460 0 => self.dstate.force_quant = Some(ftype_idx as u8),
461 1 => self.dstate.force_loop_str = Some(val),
462 2 => self.dstate.force_gf_update = true,
463 _ => self.dstate.force_pitch = Some(val),
464 };
465 }
466 }
467 }
468 Ok(())
469 }
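// Decodes the residue for one macroblock: the optional Y2 block (with the running
// DC predictor for inter macroblocks), sixteen luma and eight chroma subblocks,
// then applies the inverse transforms.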
470 fn decode_residue(&mut self, bc: &mut BoolCoder, mb_x: usize, mb_idx: usize) {
471 let qmat_idx = if let Some(idx) = self.dstate.force_quant { (idx as usize) + 1 } else { 0 };
472 let mut sbparams = SBParams {
473 scan: &DEFAULT_SCAN_ORDER,
474 qmat: &self.qmat[qmat_idx][2],
475 coef_probs: &self.dstate.coef_probs,
476 };
477 let mut has_ac = [false; 25];
478 let ytype;
479 if self.dstate.has_y2 {
480 let pred = &self.pcache.y2_pred;
481 let pidx = pred.xpos + mb_x;
482 let pctx = self.pcache.y2_pred_left + pred.data[pidx - pred.stride];
483
484 let has_nz = decode_subblock(bc, &mut self.coeffs[24], 1, pctx, &sbparams);
485 self.pcache.y2_pred.data[pidx] = has_nz;
486 self.pcache.y2_pred_left = has_nz;
487 has_ac[24] = has_nz > 0;
488
489 ytype = 0;
490 } else {
491 let pred = &mut self.pcache.y2_pred;
492 let pidx = pred.xpos + mb_x;
493 pred.data[pidx] = pred.data[pidx - pred.stride];
494
495 ytype = 3;
496 }
497 sbparams.scan = &self.scan;
498 sbparams.qmat = &self.qmat[qmat_idx][0];
499 for i in 0..16 {
500 let bx = i & 3;
501 let by = i >> 2;
502 let pred = &self.pcache.y_pred;
503 let pidx = pred.xpos + mb_x * 4 + bx + by * pred.stride;
504 let pctx = self.pcache.y_pred_left[by] + pred.data[pidx - pred.stride];
505
506 let has_nz = decode_subblock(bc, &mut self.coeffs[i], ytype, pctx, &sbparams);
507 self.pcache.y_pred.data[pidx] = has_nz;
508 self.pcache.y_pred_left[by] = has_nz;
509 has_ac[i] = has_nz > 0;
510 }
511 sbparams.qmat = &self.qmat[qmat_idx][1];
512 for i in 16..20 {
513 let bx = i & 1;
514 let by = (i >> 1) & 1;
515 let pred = &self.pcache.u_pred;
516 let pidx = pred.xpos + mb_x * 2 + bx + by * pred.stride;
517 let pctx = self.pcache.u_pred_left[by] + pred.data[pidx - pred.stride];
518
519 let has_nz = decode_subblock(bc, &mut self.coeffs[i], 2, pctx, &sbparams);
520 self.pcache.u_pred.data[pidx] = has_nz;
521 self.pcache.u_pred_left[by] = has_nz;
522 has_ac[i] = has_nz > 0;
523 }
524 for i in 20..24 {
525 let bx = i & 1;
526 let by = (i >> 1) & 1;
527 let pred = &self.pcache.v_pred;
528 let pidx = pred.xpos + mb_x * 2 + bx + by * pred.stride;
529 let pctx = self.pcache.v_pred_left[by] + pred.data[pidx - pred.stride];
530
531 let has_nz = decode_subblock(bc, &mut self.coeffs[i], 2, pctx, &sbparams);
532 self.pcache.v_pred.data[pidx] = has_nz;
533 self.pcache.v_pred_left[by] = has_nz;
534 has_ac[i] = has_nz > 0;
535 }
536
537 if self.dstate.has_y2 {
538 let y2block = &mut self.coeffs[24];
539 if self.mb_info[mb_idx].mb_type != VPMBType::Intra {
540 let mut dc = y2block[0];
541 let pval = self.dstate.pdc_pred_val;
542 if self.dstate.pdc_pred_count > 3 {
543 dc += pval;
544 y2block[0] = dc;
545 }
546 if (pval == 0) || (dc == 0) || ((pval ^ dc) < 0) {
547 self.dstate.pdc_pred_count = 0;
548 } else if dc == pval {
549 self.dstate.pdc_pred_count += 1;
550 }
 551 self.dstate.pdc_pred_val = dc;
552 }
553 if has_ac[24] {
554 idct4x4(y2block);
 555 } else if y2block[0] != 0 {
556 idct4x4_dc(y2block);
557 }
558 for i in 0..16 {
559 self.coeffs[i][0] = self.coeffs[24][i];
560 }
561 }
562 for i in 0..24 {
563 if has_ac[i] {
564 idct4x4(&mut self.coeffs[i]);
 565 } else if self.coeffs[i][0] != 0 {
566 idct4x4_dc(&mut self.coeffs[i]);
567 }
568 }
569 }
570
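// Builds the dequantisation matrices from the six quantiser indices, plus one
// extra set per possible value of the quantiser-override feature.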
571 fn set_qmat(&mut self, y_dc_q: usize, y_ac_q: usize, y2_dc_q: usize, y2_ac_q: usize, uv_dc_q: usize, uv_ac_q: usize) {
572 self.qmat[0][0][0] = Y_DC_QUANTS[y_dc_q];
573 for i in 1..16 {
574 self.qmat[0][0][i] = Y_AC_QUANTS[y_ac_q];
575 }
576 self.qmat[0][1][0] = UV_DC_QUANTS[uv_dc_q];
577 for i in 1..16 {
578 self.qmat[0][1][i] = UV_AC_QUANTS[uv_ac_q];
579 }
580 self.qmat[0][2][0] = Y2_DC_QUANTS[y2_dc_q];
581 for i in 1..16 {
582 self.qmat[0][2][i] = Y2_AC_QUANTS[y2_ac_q];
583 }
584 if let Some(ref feat) = self.dstate.features[0] {
585 for j in 0..4 {
586 let q = feat.def_val[j] as usize;
587 self.qmat[j + 1][0][0] = Y_DC_QUANTS[q];
588 for i in 1..16 {
589 self.qmat[j + 1][0][i] = Y_AC_QUANTS[q];
590 }
591 self.qmat[j + 1][1][0] = UV_DC_QUANTS[q];
592 for i in 1..16 {
593 self.qmat[j + 1][1][i] = UV_AC_QUANTS[q];
594 }
595 self.qmat[j + 1][2][0] = Y2_DC_QUANTS[q];
596 for i in 1..16 {
597 self.qmat[j + 1][2][i] = Y2_AC_QUANTS[q];
598 }
599 }
600 }
601 }
602 fn fill_ymode(&mut self, mb_x: usize, mb_y: usize, ymode: PredMode) {
603 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
604 for _ in 0..4 {
605 for x in 0..4 {
606 self.ymodes[iidx + x] = ymode;
607 }
608 iidx += self.ymode_stride;
609 }
610 }
611 fn fill_mv(&mut self, mb_x: usize, mb_y: usize, mv: MV) {
612 let mut iidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
613 for _ in 0..4 {
614 for x in 0..4 {
615 self.mvs[iidx + x] = mv;
616 }
617 iidx += self.mb_w * 4;
618 }
619 }
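// Gathers weighted motion vector candidates from already-decoded neighbours to
// derive the nearest/near vectors, the predicted vector and the inter mode
// probabilities.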
620 fn find_mv_pred(&self, mb_x: usize, mb_y: usize) -> ([u8; 4], MV, MV, MV) {
621 const CAND_POS: [(i8, i8, u8, u8); 12] = [
622 (-1, 0, 8, 12), ( 0, -1, 8, 3),
623 (-1, -1, 2, 15), (-1, 1, 2, 12),
624 (-2, 0, 2, 12), ( 0, -2, 2, 3),
625 (-1, -2, 1, 15), (-2, -1, 1, 15),
626 (-2, 1, 1, 12), (-1, 2, 1, 12),
627 (-2, -2, 1, 15), (-2, 2, 1, 12)
628 ];
629
630 let mut nearest_mv = ZERO_MV;
631 let mut near_mv = ZERO_MV;
632
633 let mut ct: [u8; 4] = [0; 4];
634
635 let start = if self.dstate.version == 0 { 1 } else { 0 };
636 let mvwrap = (self.mb_w as isize) + 1;
637 for (yoff, xoff, weight, blk_no) in CAND_POS.iter() {
638 let cx = (mb_x as isize) + (*xoff as isize);
639 let cy = (mb_y as isize) + (*yoff as isize);
640 let mvpos = cx + cy * mvwrap;
641 if (mvpos < start) || ((mvpos % mvwrap) == (mvwrap - 1)) {
642 ct[0] += weight;
643 continue;
644 }
645 let cx = (mvpos % mvwrap) as usize;
646 let cy = (mvpos / mvwrap) as usize;
647 let bx = (*blk_no as usize) & 3;
648 let by = (*blk_no as usize) >> 2;
649 let blk_pos = cx * 4 + bx + (cy * 4 + by) * self.mv_stride;
650 let mv = self.mvs[blk_pos];
651 if mv == ZERO_MV {
652 ct[0] += weight;
653 continue;
654 }
655 let idx;
656 if (nearest_mv == ZERO_MV) || (nearest_mv == mv) {
657 nearest_mv = mv;
658 idx = 1;
659 } else if near_mv == ZERO_MV {
660 near_mv = mv;
661 idx = 2;
662 } else {
663 idx = if mv == near_mv { 2 } else { 3 };
664 }
665 ct[idx] += weight;
666 }
667 let pred_mv = if ct[1] > ct[2] {
668 if ct[1] >= ct[0] { nearest_mv } else { ZERO_MV }
669 } else {
670 if ct[2] >= ct[0] { near_mv } else { ZERO_MV }
671 };
672
673 let mvprobs = [INTER_MODE_PROBS[ct[0] as usize][0],
674 INTER_MODE_PROBS[ct[1] as usize][1],
675 INTER_MODE_PROBS[ct[2] as usize][2],
676 INTER_MODE_PROBS[ct[2] as usize][3]];
677
678 (mvprobs, nearest_mv, near_mv, pred_mv)
679 }
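// Sub-macroblock motion: each partition either reuses the vector to its left or
// above, takes the zero vector, or codes a new delta against the predicted vector.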
680 fn get_split_mv(&self, bc: &mut BoolCoder, mb_x: usize, mb_y: usize, bx: usize, by: usize, pred_mv: MV) -> MV {
681 let mode = bc.read_tree(SUB_MV_REF_TREE, &SUB_MV_REF_PROBS);
682 let mvidx = mb_x * 4 + bx + (mb_y * 4 + by) * self.mv_stride;
683 match mode {
684 SubMVRef::Left => {
685 if (mb_x > 0) || (bx > 0) {
686 self.mvs[mvidx - 1]
687 } else {
688 ZERO_MV
689 }
690 },
691 SubMVRef::Above => {
692 if (mb_y > 0) || (by > 0) {
693 self.mvs[mvidx - self.mv_stride]
694 } else {
695 ZERO_MV
696 }
697 },
698 SubMVRef::Zero => ZERO_MV,
699 SubMVRef::New => {
700 let dmy = decode_mv_component(bc, &self.dstate.mv_probs[0]);
701 let dmx = decode_mv_component(bc, &self.dstate.mv_probs[1]);
702 pred_mv + MV{ x: dmx, y: dmy }
703 },
704 }
705 }
706 fn do_split_mv(&mut self, bc: &mut BoolCoder, mb_x: usize, mb_y: usize, pred_mv: MV) -> DecoderResult<()> {
707 let split_mode = bc.read_tree(MV_SPLIT_MODE_TREE, &MV_SPLIT_MODE_PROBS);
708 let mut mvidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
709 match split_mode {
710 MVSplitMode::TopBottom => {
711 let top_mv = self.get_split_mv(bc, mb_x, mb_y, 0, 0, pred_mv);
712 for _ in 0..2 {
713 for x in 0..4 { self.mvs[mvidx + x] = top_mv; }
714 mvidx += self.mv_stride;
715 }
716 let bot_mv = self.get_split_mv(bc, mb_x, mb_y, 0, 2, pred_mv);
717 for _ in 2..4 {
718 for x in 0..4 { self.mvs[mvidx + x] = bot_mv; }
719 mvidx += self.mv_stride;
720 }
721 },
722 MVSplitMode::LeftRight => {
723 let left_mv = self.get_split_mv(bc, mb_x, mb_y, 0, 0, pred_mv);
724 self.mvs[mvidx + 1] = left_mv;
725 let right_mv = self.get_split_mv(bc, mb_x, mb_y, 2, 0, pred_mv);
726 for _ in 0..4 {
727 self.mvs[mvidx + 0] = left_mv;
728 self.mvs[mvidx + 1] = left_mv;
729 self.mvs[mvidx + 2] = right_mv;
730 self.mvs[mvidx + 3] = right_mv;
731 mvidx += self.mv_stride;
732 }
733 },
734 MVSplitMode::Quarters => {
735 for y in (0..4).step_by(2) {
736 for x in (0..4).step_by(2) {
737 self.mvs[mvidx + x] = self.get_split_mv(bc, mb_x, mb_y, x, y, pred_mv);
738 self.mvs[mvidx + x + 1] = self.mvs[mvidx + x];
739 }
740 for x in 0..4 {
741 self.mvs[mvidx + x + self.mv_stride] = self.mvs[mvidx + x];
742 }
743 mvidx += self.mv_stride * 2;
744 }
745 },
746 MVSplitMode::Sixteenths => {
747 for y in 0..4 {
748 for x in 0..4 {
749 self.mvs[mvidx + x] = self.get_split_mv(bc, mb_x, mb_y, x, y, pred_mv);
750 }
751 mvidx += self.mv_stride;
752 }
753 },
754 };
755 Ok(())
756 }
757
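// Adds the decoded residue to the predicted pixels; the pitch mode controls how
// the 4x4 blocks map onto the macroblock (normal grid, 16x1 rows, or the
// interlaced x2/x4 line layouts).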
758 fn add_residue(&self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, do_luma: bool, pitch_mode: u8) {
759 if do_luma {
760 let ydst = &mut dframe.data[dframe.offset[0]..];
761 let ystride = dframe.stride[0];
762 let mut yoff = mb_x * 16 + mb_y * 16 * ystride;
763 match pitch_mode {
764 PITCH_MODE_NORMAL => {
765 for y in 0..4 {
766 for x in 0..4 {
767 add_coeffs4x4(ydst, yoff + x * 4, ystride, &self.coeffs[x + y * 4]);
768 }
769 yoff += 4 * ystride;
770 }
771 },
772 PITCH_MODE_FOUR => {
773 for y in 0..16 {
774 add_coeffs16x1(ydst, yoff, &self.coeffs[y]);
775 yoff += ystride;
776 }
777 },
778 PITCH_MODE_X2 => {
779 for y in 0..2 {
780 for x in 0..4 {
781 add_coeffs4x4(ydst, yoff + x * 4, ystride * 2, &self.coeffs[x + y * 4]);
782 }
783 yoff += 8 * ystride;
784 }
785 yoff -= 15 * ystride;
786 for y in 2..4 {
787 for x in 0..4 {
788 add_coeffs4x4(ydst, yoff + x * 4, ystride * 2, &self.coeffs[x + y * 4]);
789 }
790 yoff += 8 * ystride;
791 }
792 },
793 PITCH_MODE_X4 => {
794 for y in 0..4 {
795 for x in 0..4 {
796 add_coeffs4x4(ydst, yoff + x * 4, ystride * 4, &self.coeffs[x + y * 4]);
797 }
798 yoff += ystride;
799 }
800 },
801 _ => unreachable!(),
802 };
803 }
804 let dst = &mut dframe.data[0..];
805 let mut uoff = dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1];
806 let ustride = dframe.stride[1];
807 let mut voff = dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2];
808 let vstride = dframe.stride[2];
809 if (pitch_mode == PITCH_MODE_NORMAL) || (pitch_mode == PITCH_MODE_FOUR) {
810 for y in 0..2 {
811 for x in 0..2 {
812 add_coeffs4x4(dst, uoff + x * 4, ustride, &self.coeffs[16 + x + y * 2]);
813 add_coeffs4x4(dst, voff + x * 4, vstride, &self.coeffs[20 + x + y * 2]);
814 }
815 uoff += ustride * 4;
816 voff += vstride * 4;
817 }
818 } else {
819 for y in 0..2 {
820 for x in 0..2 {
821 add_coeffs4x4(dst, uoff + x * 4, ustride * 2, &self.coeffs[16 + x + y * 2]);
822 add_coeffs4x4(dst, voff + x * 4, vstride * 2, &self.coeffs[20 + x + y * 2]);
823 }
824 uoff += ustride;
825 voff += vstride;
826 }
827 }
828 }
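// Intra reconstruction: whole-macroblock 16x16 prediction or per-block 4x4
// prediction, propagating top-right samples between blocks and substituting the
// saved edge sample at the rightmost macroblock, followed by chroma prediction
// and the residue.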
829 fn recon_intra_mb(&mut self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize) -> DecoderResult<()> {
830 let pitch = self.dstate.force_pitch.unwrap_or(0);
831 let pitch_mode = (pitch >> 3) & 3;
832
833 let mb_idx = mb_x + mb_y * self.mb_w;
834 let has_top = mb_y > 0;
835 let has_left = mb_x > 0;
836 let ydst = &mut dframe.data[dframe.offset[0]..];
837 let ystride = dframe.stride[0];
838 let mut yoff = mb_x * 16 + mb_y * 16 * ystride;
839 let ipred_ctx_y = &mut self.dstate.ipred_ctx_y;
840 ipred_ctx_y.has_top = has_top;
841 ipred_ctx_y.has_left = has_left;
842 let is_normal = self.mb_info[mb_idx].ymode != PredMode::BPred;
843 if is_normal {
844 ipred_ctx_y.fill(ydst, yoff, ystride, 16, 16);
845 match self.mb_info[mb_idx].ymode {
846 PredMode::DCPred => IPred16x16::ipred_dc(ydst, yoff, ystride, ipred_ctx_y),
847 PredMode::HPred => IPred16x16::ipred_h (ydst, yoff, ystride, ipred_ctx_y),
848 PredMode::VPred => IPred16x16::ipred_v (ydst, yoff, ystride, ipred_ctx_y),
849 PredMode::TMPred => IPred16x16::ipred_tm(ydst, yoff, ystride, ipred_ctx_y),
850 _ => unreachable!(),
851 };
852 } else {
853 validate!((pitch_mode == PITCH_MODE_NORMAL) || (pitch_mode == PITCH_MODE_X2));
854 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
855 let mut tr_save = [0x80u8; 16];
856 if pitch_mode == PITCH_MODE_X2 {
857 // reorganise coefficient data for interlaced case
858 for y in (0..4).step_by(2) {
859 for x in 0..4 {
860 let mut tmpblock = [0i16; 16 * 2];
861 let eidx = x + y * 4;
862 let oidx = x + y * 4 + 4;
863 for i in 0..4 {
864 for j in 0..4 {
865 tmpblock[i * 8 + 0 + j] = self.coeffs[eidx][i * 4 + j];
866 tmpblock[i * 8 + 4 + j] = self.coeffs[oidx][i * 4 + j];
867 }
868 }
869 self.coeffs[eidx].copy_from_slice(&tmpblock[0..16]);
870 self.coeffs[oidx].copy_from_slice(&tmpblock[16..32]);
871 }
872 }
873 }
 874 let tr_edge = if has_top { ydst[yoff - ystride + 15] } else { 0x80 };
875 for y in 0..4 {
876 for x in 0..4 {
877 ipred_ctx_y.has_left = has_left || x > 0;
878 let bmode = self.ymodes[iidx + x];
879 let cur_yoff = yoff + x * 4;
880 let has_tr = ipred_ctx_y.has_top && ((x < 3) || ((y == 0) && (mb_y < self.mb_w - 1)));
881 let has_dl = ipred_ctx_y.has_left && (x == 0) && (y < 3);
882 ipred_ctx_y.fill(ydst, cur_yoff, ystride,
883 if has_tr { 8 } else { 4 },
884 if has_dl { 8 } else { 4 });
885 if !has_tr {
886 for i in 0..4 {
887 ipred_ctx_y.top[i + 4] = tr_save[x * 4 + i];
888 }
889 } else {
890 for i in 0..4 {
891 tr_save[x * 4 + i] = ipred_ctx_y.top[i + 4];
892 }
893 }
894 if (mb_x == self.mb_w - 1) && has_top && (x == 3) {
895 for i in 0..4 {
896 ipred_ctx_y.top[i + 4] = tr_edge;
897 }
898 }
899 match bmode {
900 PredMode::DCPred => IPred4x4::ipred_dc(ydst, cur_yoff, ystride, ipred_ctx_y),
901 PredMode::TMPred => IPred4x4::ipred_tm(ydst, cur_yoff, ystride, ipred_ctx_y),
902 PredMode::HPred => IPred4x4::ipred_he(ydst, cur_yoff, ystride, ipred_ctx_y),
903 PredMode::VPred => IPred4x4::ipred_ve(ydst, cur_yoff, ystride, ipred_ctx_y),
904 PredMode::LDPred => IPred4x4::ipred_ld(ydst, cur_yoff, ystride, ipred_ctx_y),
905 PredMode::RDPred => IPred4x4::ipred_rd(ydst, cur_yoff, ystride, ipred_ctx_y),
906 PredMode::VRPred => IPred4x4::ipred_vr(ydst, cur_yoff, ystride, ipred_ctx_y),
907 PredMode::VLPred => IPred4x4::ipred_vl(ydst, cur_yoff, ystride, ipred_ctx_y),
908 PredMode::HDPred => IPred4x4::ipred_hd(ydst, cur_yoff, ystride, ipred_ctx_y),
909 PredMode::HUPred => IPred4x4::ipred_hu(ydst, cur_yoff, ystride, ipred_ctx_y),
910 _ => unreachable!(),
911 };
912 add_coeffs4x4(ydst, cur_yoff, ystride, &self.coeffs[x + y * 4]);
913 }
914 ipred_ctx_y.has_top = true;
915 yoff += 4 * ystride;
916 iidx += self.ymode_stride;
917 }
918 }
919 let dst = &mut dframe.data[0..];
920 let uoff = dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1];
921 let ustride = dframe.stride[1];
922 let voff = dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2];
923 let vstride = dframe.stride[2];
924 let ipred_ctx_u = &mut self.dstate.ipred_ctx_u;
925 let ipred_ctx_v = &mut self.dstate.ipred_ctx_v;
926 ipred_ctx_u.has_top = has_top;
927 ipred_ctx_v.has_top = has_top;
928 ipred_ctx_u.has_left = has_left;
929 ipred_ctx_v.has_left = has_left;
930 ipred_ctx_u.fill(dst, uoff, ustride, 8, 8);
931 ipred_ctx_v.fill(dst, voff, vstride, 8, 8);
932 match self.mb_info[mb_idx].uvmode {
933 PredMode::DCPred => {
934 IPred8x8::ipred_dc(dst, uoff, ustride, ipred_ctx_u);
935 IPred8x8::ipred_dc(dst, voff, vstride, ipred_ctx_v);
936 },
937 PredMode::HPred => {
938 IPred8x8::ipred_h(dst, uoff, ustride, ipred_ctx_u);
939 IPred8x8::ipred_h(dst, voff, vstride, ipred_ctx_v);
940 },
941 PredMode::VPred => {
942 IPred8x8::ipred_v(dst, uoff, ustride, ipred_ctx_u);
943 IPred8x8::ipred_v(dst, voff, vstride, ipred_ctx_v);
944 },
945 PredMode::TMPred => {
946 IPred8x8::ipred_tm(dst, uoff, ustride, ipred_ctx_u);
947 IPred8x8::ipred_tm(dst, voff, vstride, ipred_ctx_v);
948 },
949 _ => unreachable!(),
950 };
951 self.add_residue(dframe, mb_x, mb_y, is_normal, pitch_mode);
952 Ok(())
953 }
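// Inter reconstruction: motion compensation from the last or golden frame, either
// one 16x16 block or per-block 4x4 compensation for split vectors (with special
// modes when a pitch is forced); chroma vectors are averaged in the split case,
// then the residue is added.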
954 fn recon_inter_mb(&mut self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, use_last: bool) {
955 let pitch = self.dstate.force_pitch.unwrap_or(0);
956 let pitch_dmode = (pitch >> 3) & 3;
957 let pitch_smode = pitch & 7;
958
959 let refframe = (if use_last { self.shuf.get_last() } else { self.shuf.get_golden() }).unwrap();
960 let single_mv = self.mb_info[mb_x + mb_y * self.mb_w].mb_type != VPMBType::InterFourMV;
961 let mut iidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
962 let mut mc_buf = self.mc_buf.get_data_mut().unwrap();
963
964 let dst = &mut dframe.data[0..];
965 let ystride = dframe.stride[0];
966 let mut yoff = dframe.offset[0] + mb_x * 16 + mb_y * 16 * ystride;
967 if pitch_smode == 0 {
968 if single_mv {
969 mc_block16x16(dst, yoff, ystride, mb_x * 16, mb_y * 16,
970 self.mvs[iidx].x * 2, self.mvs[iidx].y * 2, refframe.clone(), 0, &mut mc_buf);
971 } else {
972 for y in 0..4 {
973 for x in 0..4 {
974 mc_block4x4(dst, yoff + x * 4, ystride, mb_x * 16 + x * 4, mb_y * 16 + y * 4,
975 self.mvs[iidx + x].x * 2, self.mvs[iidx + x].y * 2, refframe.clone(), 0, &mut mc_buf);
976 }
977 yoff += 4 * ystride;
978 iidx += self.mv_stride;
979 }
980 }
981 } else {
982 if single_mv {
983 mc_block_special(dst, yoff, ystride, mb_x * 16, mb_y * 16,
984 self.mvs[iidx].x * 2, self.mvs[iidx].y * 2,
985 refframe.clone(), 0, &mut mc_buf, 16, pitch_smode);
986 } else {
987 for y in 0..4 {
988 for x in 0..4 {
989 mc_block_special(dst, yoff + x * 4, ystride,
990 mb_x * 16 + x * 4, mb_y * 16 + y * 4,
991 self.mvs[iidx + x].x * 2, self.mvs[iidx + x].y * 2,
992 refframe.clone(), 0, &mut mc_buf, 4, pitch_smode);
993 }
994 yoff += 4 * ystride;
995 iidx += self.mv_stride;
996 }
997 }
998 }
999
1000 let mut iidx = mb_x * 4 + mb_y * 4 * self.mv_stride;
1001 let mut uoff = dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1];
1002 let ustride = dframe.stride[1];
1003 let mut voff = dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2];
1004 let vstride = dframe.stride[2];
1005 if single_mv {
1006 let chroma_mv = self.mvs[iidx];
1007
1008 if pitch_smode == 0 {
1009 mc_block8x8(dst, uoff, ustride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf);
1010 mc_block8x8(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf);
1011 } else {
1012 mc_block_special(dst, uoff, ustride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y,
1013 refframe.clone(), 1, &mut mc_buf, 8, pitch_smode);
1014 mc_block_special(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y,
1015 refframe.clone(), 2, &mut mc_buf, 8, pitch_smode);
1016 }
1017 } else {
1018 for y in 0..2 {
1019 for x in 0..2 {
1020 let mut chroma_mv = self.mvs[iidx] + self.mvs[iidx + 1]
1021 + self.mvs[iidx + self.mv_stride]
1022 + self.mvs[iidx + self.mv_stride + 1];
1023 chroma_mv.x /= 4;
1024 chroma_mv.y /= 4;
1025
1026 if pitch_smode == 0 {
1027 mc_block4x4(dst, uoff, ustride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1028 chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf);
1029 mc_block4x4(dst, voff, vstride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1030 chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf);
1031 } else {
1032 mc_block_special(dst, uoff, ustride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1033 chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf,
1034 4, pitch_smode);
1035 mc_block_special(dst, voff, vstride, mb_x * 8 + x * 4, mb_y * 8 + y * 4,
1036 chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf,
1037 4, pitch_smode);
1038 }
1039 }
1040 uoff += ustride * 4;
1041 voff += vstride * 4;
1042 iidx += 2 * self.mv_stride;
1043 }
1044 }
1045 self.add_residue(dframe, mb_x, mb_y, true, pitch_dmode);
1046 }
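// In-loop deblocking for one macroblock: macroblock-edge filters when a left/top
// neighbour exists and inner filters on the 4-pixel grid, with thresholds derived
// from the loop strength, sharpness and the high-edge-variance table.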
1047 fn loop_filter_mb(&mut self, dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, loop_str: u8) {
1048 const HIGH_EDGE_VAR_THR: [[u8; 64]; 2] = [
1049 [
1050 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1051 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
1052 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
1053 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3
1054 ], [
1055 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1056 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1057 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2,
1058 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2
1059 ]];
1060
1061 let edge_thr = (loop_str as i16) + 2;
1062 let luma_thr = loop_str as i16;
1063 let chroma_thr = (loop_str as i16) * 2;
1064 let inner_thr = if self.dstate.loop_sharpness == 0 {
1065 loop_str as i16
1066 } else {
1067 let bound1 = (9 - self.dstate.loop_sharpness) as i16;
1068 let shift = (self.dstate.loop_sharpness + 3) >> 2;
1069 ((loop_str as i16) >> shift).min(bound1)
1070 };
1071 let hev_thr = HIGH_EDGE_VAR_THR[if self.dstate.is_intra { 1 } else { 0 }][loop_str as usize] as i16;
1072
1073 let ystride = dframe.stride[0];
1074 let ustride = dframe.stride[1];
1075 let vstride = dframe.stride[2];
1076 let ypos = dframe.offset[0] + mb_x * 16 + mb_y * 16 * ystride;
1077 let upos = dframe.offset[1] + mb_x * 8 + mb_y * 8 * ustride;
1078 let vpos = dframe.offset[2] + mb_x * 8 + mb_y * 8 * vstride;
1079
1080 let (loop_edge, loop_inner) = if self.dstate.lf_simple {
1081 (simple_loop_filter as LoopFilterFunc, simple_loop_filter as LoopFilterFunc)
1082 } else {
1083 (normal_loop_filter_edge as LoopFilterFunc, normal_loop_filter_inner as LoopFilterFunc)
1084 };
1085
1086 if mb_x > 0 {
1087 loop_edge(dframe.data, ypos, 1, ystride, 16, edge_thr, inner_thr, hev_thr);
1088 loop_edge(dframe.data, upos, 1, ustride, 8, edge_thr, inner_thr, hev_thr);
1089 loop_edge(dframe.data, vpos, 1, vstride, 8, edge_thr, inner_thr, hev_thr);
1090 }
1091 if mb_y > 0 {
1092 loop_edge(dframe.data, ypos, ystride, 1, 16, edge_thr, inner_thr, hev_thr);
1093 loop_edge(dframe.data, upos, ustride, 1, 8, edge_thr, inner_thr, hev_thr);
1094 loop_edge(dframe.data, vpos, vstride, 1, 8, edge_thr, inner_thr, hev_thr);
1095 }
1096
1097 for y in 1..4 {
1098 loop_inner(dframe.data, ypos + y * 4 * ystride, ystride, 1, 16, luma_thr, inner_thr, hev_thr);
1099 }
1100 loop_inner(dframe.data, upos + 4 * ustride, ustride, 1, 8, chroma_thr, inner_thr, hev_thr);
1101 loop_inner(dframe.data, vpos + 4 * vstride, vstride, 1, 8, chroma_thr, inner_thr, hev_thr);
1102
1103 for x in 1..4 {
1104 loop_inner(dframe.data, ypos + x * 4, 1, ystride, 16, luma_thr, inner_thr, hev_thr);
1105 }
1106 loop_inner(dframe.data, upos + 4, 1, ustride, 8, chroma_thr, inner_thr, hev_thr);
1107 loop_inner(dframe.data, vpos + 4, 1, vstride, 8, chroma_thr, inner_thr, hev_thr);
1108 }
1109}
1110
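// NADecoder interface: init() sets up the frame pool, decode() parses the frame
// header (dimensions, features, quantisers, probability updates), decodes every
// macroblock, runs the loop filter, optionally rebuilds the golden frame and
// returns the finished picture.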
1111impl NADecoder for VP7Decoder {
1112 fn init(&mut self, supp: &mut NADecoderSupport, info: NACodecInfoRef) -> DecoderResult<()> {
1113 if let NACodecTypeInfo::Video(vinfo) = info.get_properties() {
1114 let fmt = YUV420_FORMAT;
1115 let myvinfo = NAVideoInfo::new(vinfo.get_width(), vinfo.get_height(), false, fmt);
1116 let myinfo = NACodecTypeInfo::Video(myvinfo.clone());
1117 self.info = NACodecInfo::new_ref(info.get_name(), myinfo, info.get_extradata()).into_ref();
1118
1119 supp.pool_u8.set_dec_bufs(4);
1120 supp.pool_u8.prealloc_video(NAVideoInfo::new(myvinfo.get_width(), myvinfo.get_height(), false, vinfo.get_format()), 4)?;
1121 self.set_dimensions(myvinfo.get_width(), myvinfo.get_height());
1122 Ok(())
1123 } else {
1124 Err(DecoderError::InvalidData)
1125 }
1126 }
1127 fn decode(&mut self, supp: &mut NADecoderSupport, pkt: &NAPacket) -> DecoderResult<NAFrameRef> {
1128 let src = pkt.get_buffer();
1129
1130 validate!(src.len() > 4);
1131
1132 let frame_tag = read_u24le(src.as_slice())?;
1133 self.dstate.is_intra = (frame_tag & 1) == 0;
1134 self.dstate.version = ((frame_tag >> 1) & 7) as u8;
1135 let part2_off = (frame_tag >> 4) as usize;
1136 let part1_off = if self.dstate.version == 0 { 4 } else { 3 };
1137
1138 validate!(src.len() > part1_off + part2_off);
1139 let mut bc = BoolCoder::new(&src[part1_off..][..part2_off])?;
1140 let mut bc_main = BoolCoder::new(&src[part1_off + part2_off..])?;
1141 if self.dstate.is_intra {
1142 let width = bc.read_bits(12) as usize;
1143 let height = bc.read_bits(12) as usize;
1144 let _scalev = bc.read_bits(2);
1145 let _scaleh = bc.read_bits(2);
1146 validate!((width > 0) && (height > 0));
1147 self.set_dimensions(width, height);
1148
1149 self.dstate.reset();
1150 } else {
1151 if !self.shuf.has_refs() {
1152 return Err(DecoderError::MissingReference);
1153 }
1154 }
1155
1156 self.read_features(&mut bc)?;
1157
1158 let y_ac_q = bc.read_bits(7) as usize;
1159 let y_dc_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1160 let y2_dc_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1161 let y2_ac_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1162 let uv_dc_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1163 let uv_ac_q = if bc.read_bool() { bc.read_bits(7) as usize } else { y_ac_q };
1164 self.set_qmat(y_dc_q, y_ac_q, y2_dc_q, y2_ac_q, uv_dc_q, uv_ac_q);
1165
1166 let update_gf = if self.dstate.is_intra { true } else { bc.read_bool() };
1167
1168 let mut has_fading_feature = true;
1169 let mut keep_probs = true;
1170 if self.dstate.version != 0 {
1171 keep_probs = bc.read_bool();
1172 if self.dstate.is_intra {
1173 has_fading_feature = true;
1174 } else {
1175 has_fading_feature = bc.read_bool();
1176 }
1177 }
1178
1179 if has_fading_feature {
1180 self.dstate.fading = bc.read_bool();
1181 if self.dstate.fading {
1182 self.dstate.fade_alpha = bc.read_sbits(8) as u16;
1183 self.dstate.fade_beta = bc.read_sbits(8) as u16;
1184 if let Some(pframe) = self.shuf.get_last() {
1185 let mut fframe = supp.pool_u8.get_free().unwrap();
1186 let mut dframe = NASimpleVideoFrame::from_video_buf(&mut fframe).unwrap();
1187 fade_frame(pframe, &mut dframe, self.dstate.fade_alpha, self.dstate.fade_beta);
1188 self.shuf.add_frame(fframe);
1189 }
1190 }
1191 } else {
1192 self.dstate.fading = false;
1193 }
1194
1195 if self.dstate.version == 0 {
1196 self.dstate.lf_simple = bc.read_bool();
1197 }
1198
1199 if bc.read_bool() {
1200 for i in 1..16 {
1201 self.scan[i] = DEFAULT_SCAN_ORDER[bc.read_bits(4) as usize];
1202 }
1203 }
1204
1205 if self.dstate.version != 0 {
1206 self.dstate.lf_simple = bc.read_bool();
1207 } else {
1208 self.dstate.lf_simple = false;
1209 }
1210
1211 self.dstate.loop_filter_level = bc.read_bits(6) as u8;
1212 self.dstate.loop_sharpness = bc.read_bits(3) as u8;
1213
1214 self.read_dct_coef_prob_upd(&mut bc)?;
1215
1216 if !self.dstate.is_intra {
1217 self.dstate.prob_intra_pred = bc.read_byte();
1218 self.dstate.prob_last_pred = bc.read_byte();
1219 if bc.read_bool() {
1220 for i in 0..4 {
1221 self.dstate.kf_ymode_prob[i] = bc.read_byte();
1222 }
1223 }
1224 if bc.read_bool() {
1225 for i in 0..3 {
1226 self.dstate.kf_uvmode_prob[i] = bc.read_byte();
1227 }
1228 }
1229 self.read_mv_prob_upd(&mut bc)?;
1230 }
1231 if !keep_probs {
1232 self.tmp_scan.copy_from_slice(&self.scan);
1233 }
1234
1235 let vinfo = NAVideoInfo::new(self.width, self.height, false, YUV420_FORMAT);
1236 let ret = supp.pool_u8.get_free();
1237 if ret.is_none() {
1238 return Err(DecoderError::AllocError);
1239 }
1240 let mut buf = ret.unwrap();
1241 if buf.get_info() != vinfo {
1242 self.shuf.clear();
1243 supp.pool_u8.reset();
1244 supp.pool_u8.prealloc_video(vinfo, 4)?;
1245 let ret = supp.pool_u8.get_free();
1246 if ret.is_none() {
1247 return Err(DecoderError::AllocError);
1248 }
1249 buf = ret.unwrap();
1250 }
1251 let mut dframe = NASimpleVideoFrame::from_video_buf(&mut buf).unwrap();
1252
1253 let mut mb_idx = 0;
1254 self.pcache.reset();
1255 if self.dstate.is_intra || (self.dstate.version > 0) {
1256 self.dstate.pdc_pred_val = 0;
1257 self.dstate.pdc_pred_count = 0;
1258 }
1259 let mut use_last = true;
1260 for mb_y in 0..self.mb_h {
1261 for mb_x in 0..self.mb_w {
1262 self.decode_mb_features(&mut bc, mb_x, mb_y)?;
1263 self.dstate.has_y2 = true;
1264 if self.dstate.is_intra {
1265 let ymode = bc.read_tree(KF_Y_MODE_TREE, KF_Y_MODE_TREE_PROBS);
1266 if ymode == PredMode::BPred {
1267 self.dstate.has_y2 = false;
1268 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
1269 for y in 0..4 {
1270 for x in 0..4 {
1271 let top_mode = if (y > 0) || (mb_y > 0) {
1272 self.ymodes[iidx + x - self.ymode_stride]
1273 } else {
1274 PredMode::DCPred
1275 };
1276 let left_mode = if (x > 0) || (mb_x > 0) {
1277 self.ymodes[iidx + x - 1]
1278 } else {
1279 PredMode::DCPred
1280 };
1281 let top_idx = top_mode.to_b_index();
1282 let left_idx = left_mode.to_b_index();
1283 let bmode = bc.read_tree(B_MODE_TREE, &KF_B_MODE_TREE_PROBS[top_idx][left_idx]);
1284 self.ymodes[iidx + x] = bmode;
1285 }
1286 iidx += self.ymode_stride;
1287 }
1288 } else {
1289 self.fill_ymode(mb_x, mb_y, ymode.to_b_mode());
1290 }
1291 let uvmode = bc.read_tree(UV_MODE_TREE, KF_UV_MODE_TREE_PROBS);
1292 self.mb_info[mb_idx].mb_type = VPMBType::Intra;
1293 self.mb_info[mb_idx].ymode = ymode;
1294 self.mb_info[mb_idx].uvmode = uvmode;
1295 } else if !bc.read_prob(self.dstate.prob_intra_pred) {
1296 let ymode = bc.read_tree(Y_MODE_TREE, &self.dstate.kf_ymode_prob);
1297 if ymode == PredMode::BPred {
1298 self.dstate.has_y2 = false;
1299 let mut iidx = mb_x * 4 + mb_y * 4 * self.ymode_stride;
1300 for _y in 0..4 {
1301 for x in 0..4 {
1302 let bmode = bc.read_tree(B_MODE_TREE, B_MODE_TREE_PROBS);
1303 self.ymodes[iidx + x] = bmode;
1304 }
1305 iidx += self.ymode_stride;
1306 }
1307 } else {
1308 self.fill_ymode(mb_x, mb_y, PredMode::Inter);
1309 }
1310 let uvmode = bc.read_tree(UV_MODE_TREE, &self.dstate.kf_uvmode_prob);
1311 self.mb_info[mb_idx].mb_type = VPMBType::Intra;
1312 self.mb_info[mb_idx].ymode = ymode;
1313 self.mb_info[mb_idx].uvmode = uvmode;
1314 self.fill_mv(mb_x, mb_y, ZERO_MV);
1315 } else {
1316 use_last = !bc.read_prob(self.dstate.prob_last_pred);
1317
1318 let (mvprobs, nearest_mv, near_mv, pred_mv) = self.find_mv_pred(mb_x, mb_y);
1319 let mbtype = bc.read_tree(MV_REF_TREE, &mvprobs);
1320
1321 match mbtype {
1322 VPMBType::InterNearest => {
1323 self.fill_mv(mb_x, mb_y, nearest_mv);
1324 },
1325 VPMBType::InterNear => {
1326 self.fill_mv(mb_x, mb_y, near_mv);
1327 },
1328 VPMBType::InterNoMV => {
1329 self.fill_mv(mb_x, mb_y, ZERO_MV);
1330 },
1331 VPMBType::InterMV => {
1332 let dmy = decode_mv_component(&mut bc, &self.dstate.mv_probs[0]);
1333 let dmx = decode_mv_component(&mut bc, &self.dstate.mv_probs[1]);
1334 let new_mv = pred_mv + MV{ x: dmx, y: dmy };
1335 self.fill_mv(mb_x, mb_y, new_mv);
1336 },
1337 VPMBType::InterFourMV => {
1338 self.do_split_mv(&mut bc, mb_x, mb_y, pred_mv)?;
1339 },
1340 _ => unreachable!(),
1341 };
1342
1343 self.fill_ymode(mb_x, mb_y, PredMode::Inter);
1344 self.mb_info[mb_idx].mb_type = mbtype;
1345 self.mb_info[mb_idx].ymode = PredMode::Inter;
1346 self.mb_info[mb_idx].uvmode = PredMode::Inter;
1347 }
1348 self.decode_residue(&mut bc_main, mb_x, mb_idx);
1349 match self.mb_info[mb_idx].mb_type {
1350 VPMBType::Intra => {
1351 self.recon_intra_mb(&mut dframe, mb_x, mb_y)?;
1352 },
1353 _ => {
1354 self.recon_inter_mb(&mut dframe, mb_x, mb_y, use_last);
1355 },
1356 }
1357 if let Some(loop_str) = self.dstate.force_loop_str {
1358 self.mb_info[mb_idx].loop_str = loop_str;
1359 } else {
1360 self.mb_info[mb_idx].loop_str = self.dstate.loop_filter_level;
1361 }
1362 self.mb_info[mb_idx].upd_gf = self.dstate.force_gf_update;
1363 mb_idx += 1;
1364 }
1365 self.pcache.update_row();
1366 }
1367 let mut mb_idx = 0;
1368 for mb_y in 0..self.mb_h {
1369 for mb_x in 0..self.mb_w {
1370 let loop_str = self.mb_info[mb_idx].loop_str;
1371 self.loop_filter_mb(&mut dframe, mb_x, mb_y, loop_str);
1372 mb_idx += 1;
1373 }
1374 }
1375 if !update_gf && self.dstate.features[2].is_some() {
1376 let gf = self.shuf.get_golden().unwrap();
1377 let mut new_gf = supp.pool_u8.get_copy(&gf).unwrap();
1378 let dframe = NASimpleVideoFrame::from_video_buf(&mut new_gf).unwrap();
1379 let mut mb_idx = 0;
1380 let mut mc_buf = self.mc_buf.get_data_mut().unwrap();
1381 for mb_y in 0..self.mb_h {
1382 for mb_x in 0..self.mb_w {
1383 if self.mb_info[mb_idx].upd_gf {
1384 mc_block16x16(dframe.data, dframe.offset[0] + mb_x * 16 + mb_y * 16 * dframe.stride[0], dframe.stride[0], mb_x * 16, mb_y * 16, 0, 0, buf.clone(), 0, &mut mc_buf);
1385 mc_block8x8(dframe.data, dframe.offset[1] + mb_x * 8 + mb_y * 8 * dframe.stride[1], dframe.stride[1], mb_x * 8, mb_y * 8, 0, 0, buf.clone(), 1, &mut mc_buf);
1386 mc_block8x8(dframe.data, dframe.offset[2] + mb_x * 8 + mb_y * 8 * dframe.stride[2], dframe.stride[2], mb_x * 8, mb_y * 8, 0, 0, buf.clone(), 2, &mut mc_buf);
1387 }
1388 mb_idx += 1;
1389 }
1390 }
1391 self.shuf.add_golden_frame(new_gf);
1392 }
1393
1394 if !keep_probs {
1395 self.scan.copy_from_slice(&self.tmp_scan);
1396 }
1397 if update_gf {
1398 self.shuf.add_golden_frame(buf.clone());
1399 }
1400 self.shuf.add_frame(buf.clone());
1401
1402 let mut frm = NAFrame::new_from_pkt(pkt, self.info.clone(), NABufferType::Video(buf));
1403 frm.set_keyframe(self.dstate.is_intra);
1404 frm.set_frame_type(if self.dstate.is_intra { FrameType::I } else { FrameType::P });
1405 Ok(frm.into_ref())
1406 }
1407 fn flush(&mut self) {
1408 self.shuf.clear();
1409 }
1410}
1411
1412pub fn get_decoder() -> Box<NADecoder + Send> {
1413 Box::new(VP7Decoder::new())
1414}
1415
1416#[cfg(test)]
1417mod test {
1418 use nihav_core::codecs::RegisteredDecoders;
1419 use nihav_core::demuxers::RegisteredDemuxers;
1420 use nihav_core::test::dec_video::*;
1421 use crate::codecs::duck_register_all_codecs;
1422 use nihav_commonfmt::demuxers::generic_register_all_demuxers;
1423
1424 #[test]
1425 fn test_vp7() {
1426 let mut dmx_reg = RegisteredDemuxers::new();
1427 generic_register_all_demuxers(&mut dmx_reg);
1428 let mut dec_reg = RegisteredDecoders::new();
1429 duck_register_all_codecs(&mut dec_reg);
1430
1431 //let file = "assets/Duck/potter-40.vp7";
1432 //let file = "assets/Duck/potter-500.vp7";
1433 //let file = "assets/Duck/starsky-700.vp7";
1434 //let file = "assets/Duck/taking-700.vp7";
1435 //let file = "assets/Duck/troy-700.vp7";
1436 let file = "assets/Duck/interlaced_blit_pitch.avi";
1437 //let file = "assets/Duck/vp7.avi";
1438 test_file_decoding("avi", file, Some(12), true, false, None/*Some("vp7")*/, &dmx_reg, &dec_reg);
1439 }
1440}
1441
1442/*const DEFAULT_ZIGZAG: [usize; 16] = [
1443 0, 1, 5, 6,
1444 2, 4, 7, 12,
1445 3, 8, 11, 13,
1446 9, 10, 14, 15
1447];*/
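// VP7's default coefficient scan order; the frame header may build a custom scan
// by reading indices into this table.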
1448const DEFAULT_SCAN_ORDER: [usize; 16] = [
1449 0, 1, 4, 8,
1450 5, 2, 3, 6,
1451 9, 12, 13, 10,
1452 7, 11, 14, 15
1453];
1454
1455const Y_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1456 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Index(2),
1457 VPTreeDef::Index(4), VPTreeDef::Index(6),
1458 VPTreeDef::Value(PredMode::VPred), VPTreeDef::Value(PredMode::HPred),
1459 VPTreeDef::Value(PredMode::TMPred), VPTreeDef::Value(PredMode::BPred),
1460];
1461const KF_Y_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1462 VPTreeDef::Value(PredMode::BPred), VPTreeDef::Index(2),
1463 VPTreeDef::Index(4), VPTreeDef::Index(6),
1464 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Value(PredMode::VPred),
1465 VPTreeDef::Value(PredMode::HPred), VPTreeDef::Value(PredMode::TMPred),
1466];
1467const UV_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1468 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Index(2),
1469 VPTreeDef::Value(PredMode::VPred), VPTreeDef::Index(4),
1470 VPTreeDef::Value(PredMode::HPred), VPTreeDef::Value(PredMode::TMPred)
1471];
1472const B_MODE_TREE: &[VPTreeDef<PredMode>] = &[
1473 VPTreeDef::Value(PredMode::DCPred), VPTreeDef::Index(2),
1474 VPTreeDef::Value(PredMode::TMPred), VPTreeDef::Index(4),
1475 VPTreeDef::Value(PredMode::VPred), VPTreeDef::Index(6),
1476 VPTreeDef::Index(8), VPTreeDef::Index(12),
1477 VPTreeDef::Value(PredMode::HPred), VPTreeDef::Index(10),
1478 VPTreeDef::Value(PredMode::RDPred), VPTreeDef::Value(PredMode::VRPred),
1479 VPTreeDef::Value(PredMode::LDPred), VPTreeDef::Index(14),
1480 VPTreeDef::Value(PredMode::VLPred), VPTreeDef::Index(16),
1481 VPTreeDef::Value(PredMode::HDPred), VPTreeDef::Value(PredMode::HUPred)
1482];
1483
1484const FEATURE_TREE: &[VPTreeDef<usize>] = &[
1485 VPTreeDef::Index(2), VPTreeDef::Index(4),
1486 VPTreeDef::Value(0), VPTreeDef::Value(1),
1487 VPTreeDef::Value(2), VPTreeDef::Value(3)
1488];
1489
1490const COEF_TREE: &[VPTreeDef<DCTToken>] = &[
1491 VPTreeDef::Value(DCTToken::EOB), VPTreeDef::Index(2),
1492 VPTreeDef::Value(DCTToken::Zero), VPTreeDef::Index(4),
1493 VPTreeDef::Value(DCTToken::One), VPTreeDef::Index(6),
1494 VPTreeDef::Index(8), VPTreeDef::Index(12),
1495 VPTreeDef::Value(DCTToken::Two), VPTreeDef::Index(10),
1496 VPTreeDef::Value(DCTToken::Three), VPTreeDef::Value(DCTToken::Four),
1497 VPTreeDef::Index(14), VPTreeDef::Index(16),
1498 VPTreeDef::Value(DCTToken::Cat1), VPTreeDef::Value(DCTToken::Cat2),
1499 VPTreeDef::Index(18), VPTreeDef::Index(20),
1500 VPTreeDef::Value(DCTToken::Cat3), VPTreeDef::Value(DCTToken::Cat4),
1501 VPTreeDef::Value(DCTToken::Cat5), VPTreeDef::Value(DCTToken::Cat6)
1502];
1503
1504const MV_REF_TREE: &[VPTreeDef<VPMBType>] = &[
1505 VPTreeDef::Value(VPMBType::InterNoMV), VPTreeDef::Index(2),
1506 VPTreeDef::Value(VPMBType::InterNearest), VPTreeDef::Index(4),
1507 VPTreeDef::Value(VPMBType::InterNear), VPTreeDef::Index(6),
1508 VPTreeDef::Value(VPMBType::InterMV), VPTreeDef::Value(VPMBType::InterFourMV)
1509];
1510const SMALL_MV_TREE: &[VPTreeDef<i16>] = &[
1511 VPTreeDef::Index(2), VPTreeDef::Index(8),
1512 VPTreeDef::Index(4), VPTreeDef::Index(6),
1513 VPTreeDef::Value(0), VPTreeDef::Value(1),
1514 VPTreeDef::Value(2), VPTreeDef::Value(3),
1515 VPTreeDef::Index(10), VPTreeDef::Index(12),
1516 VPTreeDef::Value(4), VPTreeDef::Value(5),
1517 VPTreeDef::Value(6), VPTreeDef::Value(7)
1518];
1519const MV_SPLIT_MODE_TREE: &[VPTreeDef<MVSplitMode>] = &[
1520 VPTreeDef::Value(MVSplitMode::Sixteenths), VPTreeDef::Index(2),
1521 VPTreeDef::Value(MVSplitMode::Quarters), VPTreeDef::Index(4),
1522 VPTreeDef::Value(MVSplitMode::TopBottom), VPTreeDef::Value(MVSplitMode::LeftRight)
1523];
1524const SUB_MV_REF_TREE: &[VPTreeDef<SubMVRef>] = &[
1525 VPTreeDef::Value(SubMVRef::Left), VPTreeDef::Index(2),
1526 VPTreeDef::Value(SubMVRef::Above), VPTreeDef::Index(4),
1527 VPTreeDef::Value(SubMVRef::Zero), VPTreeDef::Value(SubMVRef::New)
1528];