// vp7enc: remove duplicate tables (commit subject from the git listing)
// nihav.git: nihav-duck/src/codecs/vp7enc/frame_coder.rs
use nihav_core::codecs::*;
use nihav_codec_support::codecs::ZERO_MV;
use super::super::vp78::PredMode;
use super::super::vp78dsp::*;
use super::super::vp7data::HIGH_EDGE_VAR_THR;
use super::super::vp7dsp::*;
use super::blocks::*;
use super::coder::*;
use super::mb_coding::*;
use super::models::*;
use super::motion_est::*;
use super::rdo::*;
13
14 const MBT_Q_OFFSET: usize = 3;
15
/// Parameters controlling the in-loop deblocking filter.
pub struct LoopParams {
    /// Loop filter sharpness (0..=7); non-zero values clamp the inner-edge threshold.
    pub loop_sharpness:    u8,
    /// Base loop filter strength.
    pub loop_filter_level: u8,
    /// Whether the simple (luma-edge-only style) filter variant is used.
    pub lf_simple:         bool,
}
21
22 pub struct FrameEncoder {
23 mb_w: usize,
24 mb_h: usize,
25 pub loop_params: LoopParams,
26
27 sblocks: Vec<SrcBlock>,
28 res: Vec<Residue>,
29 mbtypes: Vec<MBType>,
30 recon: Vec<SrcBlock>,
31 features: Vec<u8>,
32 has_features: bool,
33
34 pctx: PredContext,
35
36 me_mode: MVSearchMode,
37 me_range: i16,
38 mc_buf1: NAVideoBufferRef<u8>,
39 mc_buf2: NAVideoBufferRef<u8>,
40 mv_search_last: Box<dyn MVSearch + Send>,
41 mv_search_gold: Box<dyn MVSearch + Send>,
42 }
43
44 impl FrameEncoder {
45 pub fn new(mc_buf1: NAVideoBufferRef<u8>, mc_buf2: NAVideoBufferRef<u8>) -> Self {
46 let me_mode = MVSearchMode::default();
47
48 Self {
49 mb_w: 0,
50 mb_h: 0,
51
52 sblocks: Vec::new(),
53 res: Vec::new(),
54 mbtypes: Vec::new(),
55 recon: Vec::new(),
56 features: Vec::new(),
57 has_features: false,
58
59 pctx: PredContext::new(),
60
61 loop_params: LoopParams {
62 loop_filter_level: 0,
63 loop_sharpness: 0,
64 lf_simple: true,
65 },
66 me_mode,
67 me_range: 0,
68 mv_search_last: me_mode.create_search(),
69 mv_search_gold: me_mode.create_search(),
70 mc_buf1, mc_buf2,
71 }
72 }
73 pub fn resize(&mut self, mb_w: usize, mb_h: usize) {
74 self.mb_w = mb_w;
75 self.mb_h = mb_h;
76
77 self.pctx.resize(mb_w, mb_h);
78
79 self.sblocks.clear();
80 self.sblocks.reserve(mb_w * mb_h);
81 self.res.clear();
82 self.res.reserve(mb_w * mb_h);
83 self.mbtypes.clear();
84 self.mbtypes.reserve(mb_w * mb_h);
85 self.recon.clear();
86 self.recon.reserve(mb_w * mb_h);
87 self.features.clear();
88 self.features.reserve(mb_w * mb_h);
89 }
90 pub fn set_me_params(&mut self, me_mode: MVSearchMode, me_range: i16, version: u8) {
91 self.me_range = me_range;
92 if self.me_mode != me_mode {
93 self.me_mode = me_mode;
94 self.mv_search_last = me_mode.create_search();
95 self.mv_search_gold = me_mode.create_search();
96 }
97 self.pctx.version = version;
98 }
99 pub fn load_frame(&mut self, vbuf: &NAVideoBuffer<u8>) {
100 load_blocks(vbuf, &mut self.sblocks);
101 }
102
103 pub fn mb_tree_search(&mut self, ref_frm: NAVideoBufferRef<u8>, mb_map: &[usize], new_mb_map: &mut [usize], mb_weights: &mut [usize]) {
104 let mut mv_est = MVEstimator::new(ref_frm, self.mc_buf1.clone(), self.me_range);
105 self.mv_search_last.preinit(&mv_est);
106 let mut mb_idx = 0;
107 new_mb_map.copy_from_slice(mb_map);
108 for (mb_y, mb_row) in self.sblocks.chunks(self.mb_w).enumerate() {
109 for (mb_x, blk) in mb_row.iter().enumerate() {
110 let (mv, _) = self.mv_search_last.search_mb(&mut mv_est, blk, mb_x, mb_y);
111
112 if mv != ZERO_MV {
113 let new_x = ((((mb_x as isize) * 64 + (mv.x as isize) + 32) >> 6).max(0) as usize).min(self.mb_w - 1);
114 let new_y = ((((mb_y as isize) * 64 + (mv.y as isize) + 32) >> 6).max(0) as usize).min(self.mb_h - 1);
115 let nidx = new_x + new_y * self.mb_w;
116 new_mb_map[mb_idx] = mb_map[nidx];
117 }
118 mb_weights[new_mb_map[mb_idx]] += 1;
119 mb_idx += 1;
120 }
121 }
122 }
123
124 pub fn intra_blocks(&mut self, base_q: usize, metric: &RateDistMetric, models: &VP7Models, mbt_map: Option<&[usize]>) {
125 self.mbtypes.clear();
126 self.pctx.reset();
127 self.pctx.reset_intra();
128 self.res.clear();
129 self.recon.clear();
130 self.features.clear();
131
132 self.has_features = false;
133 if base_q > MBT_Q_OFFSET {
134 if let Some(map) = mbt_map {
135 let sum: usize = map.iter().sum();
136 let size = map.len();
137 let avg = (sum + size / 2) / size;
138 for &val in map.iter() {
139 if val > avg {
140 self.features.push(1);
141 self.has_features = true;
142 } else {
143 self.features.push(0);
144 }
145 }
146 } else {
147 for _ in 0..(self.mb_w * self.mb_h) {
148 self.features.push(0);
149 }
150 }
151 } else {
152 for _ in 0..(self.mb_w * self.mb_h) {
153 self.features.push(0);
154 }
155 }
156
157 let mut imctx = IntraModePredCtx {
158 metric,
159 models,
160 tr: [0; 4],
161 q: base_q,
162 ipred_y: IPredContext::default(),
163 ipred_u: IPredContext::default(),
164 ipred_v: IPredContext::default(),
165 pctx: BlockPCtx::default(),
166 };
167
168 for (mb_y, mb_row) in self.sblocks.chunks_mut(self.mb_w).enumerate() {
169 imctx.ipred_y.has_top = mb_y != 0;
170 imctx.ipred_u.has_top = mb_y != 0;
171 imctx.ipred_v.has_top = mb_y != 0;
172
173 for (mb_x, sblk) in mb_row.iter().enumerate() {
174 self.pctx.fill_ipred(0, mb_x, &mut imctx.ipred_y);
175 self.pctx.fill_ipred(1, mb_x, &mut imctx.ipred_u);
176 self.pctx.fill_ipred(2, mb_x, &mut imctx.ipred_v);
177 self.pctx.fill_pctx(mb_x, &mut imctx.pctx);
178 if self.has_features {
179 imctx.q = if self.features[mb_x + mb_y * self.mb_w] != 0 {
180 base_q - MBT_Q_OFFSET
181 } else {
182 base_q
183 };
184 }
185
186 let mut res = Residue::new();
187 let mut newblk = SrcBlock::default();
188
189 imctx.tr = self.pctx.get_ipred_tr(mb_x);
190 let mut mb_type = select_intra_mode(sblk, &mut newblk, &mut res, &imctx, MAX_DIST, MBType::InterNoMV(false, [0;4]));
191
192 let use_i4 = match mb_type {
193 MBType::Intra(best_ymode, best_cmode) => {
194 sblk.apply_ipred_luma(best_ymode, &imctx.ipred_y, &mut res);
195 newblk.fill_ipred_luma(best_ymode, &imctx.ipred_y);
196 sblk.apply_ipred_chroma(best_cmode, &imctx.ipred_u, &imctx.ipred_v, &mut res);
197 newblk.fill_ipred_chroma(best_cmode, &imctx.ipred_u, &imctx.ipred_v);
198 res.fdct();
199 res.fdct_dc_block();
200
201 self.pctx.ymodes.set_mode(mb_x, best_ymode);
202
203 false
204 },
205 MBType::Intra4x4(ref i4_modes, ref mut i4ctx, best_cmode) => {
206 sblk.apply_ipred_chroma(best_cmode, &imctx.ipred_u, &imctx.ipred_v, &mut res);
207 newblk.fill_ipred_chroma(best_cmode, &imctx.ipred_u, &imctx.ipred_v);
208 res.fdct();
209
210 self.pctx.ymodes.set_modes4x4(mb_x, i4_modes, i4ctx);
211
212 true
213 },
214 _ => unreachable!(),
215 };
216
217 res.quant(imctx.q);
218 self.pctx.set_nz(mb_x, &res);
219 let mut recon = res.clone();
220 self.res.push(res);
221 self.mbtypes.push(mb_type);
222
223 if !use_i4 {
224 recon.add_residue(&mut newblk);
225 } else {
226 recon.add_residue_chroma(&mut newblk);
227 }
228
229 self.pctx.update_mb(&newblk, mb_x);
230 self.recon.push(newblk);
231 }
232 self.pctx.update_mb_row();
233 }
234 }
235 pub fn inter_blocks(&mut self, q: usize, metric: &RateDistMetric, models: &VP7Models, last_frame: &NABufferType, gold_frame: &NABufferType) {
236 self.has_features = false;
237
238 let mut mv_est_last = MVEstimator::new(last_frame.get_vbuf().unwrap(), self.mc_buf1.clone(), self.me_range);
239 self.mv_search_last.preinit(&mv_est_last);
240 let mut mv_est_gold = if let Some(gbuf) = gold_frame.get_vbuf() {
241 let mv_est = MVEstimator::new(gbuf, self.mc_buf2.clone(), self.me_range);
242 self.mv_search_gold.preinit(&mv_est);
243 Some(mv_est)
244 } else {
245 None
246 };
247
248 self.mbtypes.clear();
249 self.pctx.reset();
250 self.pctx.save_dc_pred();
251 self.res.clear();
252 self.recon.clear();
253 self.features.clear();
254
255 let mut imctx = IntraModePredCtx {
256 metric,
257 models,
258 tr: [0; 4],
259 q,
260 ipred_y: IPredContext::default(),
261 ipred_u: IPredContext::default(),
262 ipred_v: IPredContext::default(),
263 pctx: BlockPCtx::default(),
264 };
265
266 for (mb_y, mb_row) in self.sblocks.chunks_mut(self.mb_w).enumerate() {
267 imctx.ipred_y.has_top = mb_y != 0;
268 imctx.ipred_u.has_top = mb_y != 0;
269 imctx.ipred_v.has_top = mb_y != 0;
270
271 for (mb_x, sblk) in mb_row.iter().enumerate() {
272 self.pctx.fill_ipred(0, mb_x, &mut imctx.ipred_y);
273 self.pctx.fill_ipred(1, mb_x, &mut imctx.ipred_u);
274 self.pctx.fill_ipred(2, mb_x, &mut imctx.ipred_v);
275 self.pctx.fill_pctx(mb_x, &mut imctx.pctx);
276
277 let mut res = Residue::new();
278 let mut newblk = SrcBlock::default();
279
280 let (mvprobs, nearest_mv, near_mv, pred_mv) = self.pctx.find_mv_pred(mb_x, mb_y);
281
282 let (mv, _dist) = self.mv_search_last.search_mb(&mut mv_est_last, sblk, mb_x, mb_y);
283
284 mv_est_last.get_mb(&mut newblk, mb_x, mb_y, mv);
285 let mv_nits_dist = metric.calc_metric(0, inter_mv_nits(mv, &mvprobs, nearest_mv, near_mv, pred_mv, models));
286 let last_dist = calc_inter_mb_dist(sblk, &newblk, &mut res, &imctx, self.pctx.get_y2_dc_pred(true)) + mv_nits_dist;
287
288 let (gmv, gold_dist) = if last_dist > SMALL_DIST {
289 if let Some(ref mut mv_est) = &mut mv_est_gold {
290 let (gmv, _gdist) = self.mv_search_gold.search_mb(mv_est, sblk, mb_x, mb_y);
291 mv_est.get_mb(&mut newblk, mb_x, mb_y, gmv);
292 let mv_nits_dist = metric.calc_metric(0, inter_mv_nits(gmv, &mvprobs, nearest_mv, near_mv, pred_mv, models));
293 let gdist = calc_inter_mb_dist(sblk, &newblk, &mut res, &imctx, self.pctx.get_y2_dc_pred(false)) + mv_nits_dist;
294 (gmv, gdist)
295 } else {
296 (ZERO_MV, MAX_DIST)
297 }
298 } else {
299 (ZERO_MV, MAX_DIST)
300 };
301
302 let (last, mut inter_dist, mv, mv_est) = if last_dist < gold_dist {
303 (true, last_dist, mv, &mut mv_est_last)
304 } else if let Some (ref mut mv_est) = &mut mv_est_gold {
305 (false, gold_dist, gmv, mv_est)
306 } else {
307 unreachable!()
308 };
309
310 let mut mb_type = if mv == ZERO_MV {
311 MBType::InterNoMV(last, mvprobs)
312 } else if mv == nearest_mv {
313 MBType::InterNearest(last, mvprobs)
314 } else if mv == near_mv {
315 MBType::InterNear(last, mvprobs)
316 } else {
317 MBType::InterMV(last, mvprobs, mv - pred_mv)
318 };
319 if inter_dist > SMALL_DIST {
320 if let MBType::InterMV(_, _, _) = mb_type { // xxx: maybe do it for all types?
321 let mv_search = if last { &mut self.mv_search_last } else { &mut self.mv_search_gold };
322 if let Some((mbt, dist)) = try_inter_split(sblk, &mut newblk, &mut res, mvprobs, nearest_mv, near_mv, pred_mv, last, mb_x, mb_y, mv_search, mv_est, &mut self.pctx, &imctx, inter_dist) {
323 mb_type = mbt;
324 inter_dist = dist;
325 }
326 }
327 }
328
329 if inter_dist > SMALL_DIST {
330 imctx.tr = self.pctx.get_ipred_tr(mb_x);
331 mb_type = select_intra_mode(sblk, &mut newblk, &mut res, &imctx, inter_dist, mb_type);
332 }
333
334 self.mbtypes.push(mb_type);
335 res.reset();
336 match mb_type {
337 MBType::Intra(ymode, cmode) => {
338 newblk.fill_ipred_luma(ymode, &imctx.ipred_y);
339 newblk.fill_ipred_chroma(cmode, &imctx.ipred_u, &imctx.ipred_v);
340 self.pctx.ymodes.set_mode(mb_x, ymode);
341 self.pctx.fill_mv(mb_x, mb_y, ZERO_MV);
342 },
343 MBType::Intra4x4(ref i4_modes, ref mut i4ctx, cmode) => {
344 newblk.fill_ipred_chroma(cmode, &imctx.ipred_u, &imctx.ipred_v);
345 self.pctx.ymodes.set_modes4x4(mb_x, i4_modes, i4ctx);
346 self.pctx.fill_mv(mb_x, mb_y, ZERO_MV);
347 },
348 MBType::InterNoMV(_, _) |
349 MBType::InterNearest(_, _) |
350 MBType::InterNear(_, _) |
351 MBType::InterMV(_, _, _) => {
352 mv_est.get_mb(&mut newblk, mb_x, mb_y, mv);
353 self.pctx.fill_mv(mb_x, mb_y, mv);
354 self.pctx.ymodes.set_mode(mb_x, PredMode::Inter);
355 },
356 MBType::InterSplitMV(_, _, _, _, _) => {
357 self.pctx.ymodes.set_mode(mb_x, PredMode::Inter);
358 recon_split_mb(&mut newblk, mb_x, mb_y, &self.pctx.mvs, self.pctx.mv_stride, mv_est);
359 },
360 };
361 if let MBType::Intra4x4(_, _, _) = mb_type {
362 res.set_chroma_from_diff(&sblk.chroma, &newblk.chroma);
363 res.fdct();
364 } else {
365 res.set_luma_from_diff(&sblk.luma, &newblk.luma);
366 res.set_chroma_from_diff(&sblk.chroma, &newblk.chroma);
367 res.fdct();
368 res.fdct_dc_block();
369 if !mb_type.is_intra() {
370 requant_y2_dc(&mut res.dcs[0], q);
371 self.pctx.predict_y2_dc(&mut res.dcs[0], last);
372 }
373 }
374
375 res.quant(q);
376 self.pctx.set_nz(mb_x, &res);
377 let mut recon = res.clone();
378 self.res.push(res);
379 self.features.push(0);
380 if let MBType::Intra4x4(_, _, _) = mb_type {
381 recon.add_residue_chroma(&mut newblk);
382 } else {
383 recon.add_residue(&mut newblk);
384 }
385 self.pctx.update_mb(&newblk, mb_x);
386 self.recon.push(newblk);
387 }
388 self.pctx.update_mb_row();
389 }
390 }
391 pub fn encode_features(&self, bc: &mut BoolEncoder, q: usize, models: &VP7Models) -> EncoderResult<()> {
392 if self.has_features {
393 // first feature - quantiser
394 bc.put_bool(true, 128)?;
395 bc.put_byte(models.feature_present[0])?;
396 for &prob in models.feature_tree_probs[0].iter() {
397 bc.put_bool(prob != 255, 128)?;
398 if prob != 255 {
399 bc.put_byte(prob)?;
400 }
401 }
402 bc.put_bool(true, 128)?;
403 bc.put_bits((q - MBT_Q_OFFSET) as u32, 7)?;
404 for _ in 1..4 {
405 bc.put_bool(false, 128)?; // other quants
406 }
407
408 // other features (
409 for _ in 1..4 {
410 bc.put_bool(false, 128)?;
411 }
412 } else {
413 for _ in 0..4 {
414 bc.put_bool(false, 128)?;
415 }
416 }
417 Ok(())
418 }
419 pub fn encode_mb_types(&self, bc: &mut BoolEncoder, is_intra: bool, models: &VP7Models) -> EncoderResult<()> {
420 for (mb_type, &feature) in self.mbtypes.iter().zip(self.features.iter()) {
421 if self.has_features {
422 bc.encode_feature(0, if feature == 0 { None } else { Some(0) }, models)?;
423 }
424 bc.encode_mb_type(is_intra, mb_type, models)?;
425 }
426 Ok(())
427 }
428 pub fn encode_residues(&mut self, bc: &mut BoolEncoder, models: &VP7Models) -> EncoderResult<()> {
429 self.pctx.reset();
430 //self.pctx.restore_dc_pred();
431 for (_mb_y, mb_row) in self.res.chunks(self.mb_w).enumerate() {
432 for (mb_x, blk) in mb_row.iter().enumerate() {
433 if blk.has_dc {
434 let pctx = (self.pctx.nz_y2_left as u8) + (self.pctx.nz_y2_top[mb_x] as u8);
435 bc.encode_subblock(&blk.dcs, 1, pctx, models)?;
436 let has_nz = blk.dcs.has_nz();
437 self.pctx.nz_y2_left = has_nz;
438 self.pctx.nz_y2_top[mb_x] = has_nz;
439 }
440 let ytype = if blk.has_dc { 0 } else { 3 };
441 for (y, blk_row) in blk.luma.chunks(4).enumerate() {
442 for (x, blk) in blk_row.iter().enumerate() {
443 let pctx = (self.pctx.nz_y_left[y] as u8) + (self.pctx.nz_y_top[mb_x * 4 + x] as u8);
444 bc.encode_subblock(blk, ytype, pctx, models)?;
445 let has_nz = blk.has_nz();
446 self.pctx.nz_y_left[y] = has_nz;
447 self.pctx.nz_y_top[mb_x * 4 + x] = has_nz;
448 }
449 }
450
451 for (c, chroma) in blk.chroma.iter().enumerate() {
452 for (y, blk_row) in chroma.chunks(2).enumerate() {
453 for (x, blk) in blk_row.iter().enumerate() {
454 let pctx = (self.pctx.nz_c_left[c][y] as u8) + (self.pctx.nz_c_top[c][mb_x * 2 + x] as u8);
455 bc.encode_subblock(blk, 2, pctx, models)?;
456 let has_nz = blk.has_nz();
457 self.pctx.nz_c_left[c][y] = has_nz;
458 self.pctx.nz_c_top[c][mb_x * 2 + x] = has_nz;
459 }
460 }
461 }
462 }
463 self.pctx.update_mb_row();
464 }
465 Ok(())
466 }
467 pub fn generate_models(&mut self, is_intra: bool, stats: &mut VP7ModelsStat) {
468 stats.reset();
469 let est = Estimator::new();
470 self.pctx.reset();
471 if self.has_features {
472 for &feat in self.features.iter() {
473 est.estimate_feature(0, if feat == 0 { None } else { Some(0) }, stats);
474 }
475 }
476 for (mbt_row, mb_row) in self.mbtypes.chunks(self.mb_w).zip(self.res.chunks(self.mb_w)) {
477 for (mb_x, (mbtype, blk)) in mbt_row.iter().zip(mb_row.iter()).enumerate() {
478 est.estimate_mb_type(is_intra, mbtype, stats);
479 if blk.has_dc {
480 let pctx = (self.pctx.nz_y2_left as u8) + (self.pctx.nz_y2_top[mb_x] as u8);
481 est.estimate_subblock(&blk.dcs, 1, pctx, stats);
482 let has_nz = blk.dcs.has_nz();
483 self.pctx.nz_y2_left = has_nz;
484 self.pctx.nz_y2_top[mb_x] = has_nz;
485 }
486 let ytype = if blk.has_dc { 0 } else { 3 };
487 for (y, blk_row) in blk.luma.chunks(4).enumerate() {
488 for (x, blk) in blk_row.iter().enumerate() {
489 let pctx = (self.pctx.nz_y_left[y] as u8) + (self.pctx.nz_y_top[mb_x * 4 + x] as u8);
490 est.estimate_subblock(blk, ytype, pctx, stats);
491 let has_nz = blk.has_nz();
492 self.pctx.nz_y_left[y] = has_nz;
493 self.pctx.nz_y_top[mb_x * 4 + x] = has_nz;
494 }
495 }
496
497 for (c, chroma) in blk.chroma.iter().enumerate() {
498 for (y, blk_row) in chroma.chunks(2).enumerate() {
499 for (x, blk) in blk_row.iter().enumerate() {
500 let pctx = (self.pctx.nz_c_left[c][y] as u8) + (self.pctx.nz_c_top[c][mb_x * 2 + x] as u8);
501 est.estimate_subblock(blk, 2, pctx, stats);
502 let has_nz = blk.has_nz();
503 self.pctx.nz_c_left[c][y] = has_nz;
504 self.pctx.nz_c_top[c][mb_x * 2 + x] = has_nz;
505 }
506 }
507 }
508 }
509 self.pctx.update_mb_row();
510 }
511 }
512 pub fn reconstruct_frame(&mut self, frm: &mut NASimpleVideoFrame<u8>, is_intra: bool) {
513 let mut yidx = frm.offset[0];
514 let mut uidx = frm.offset[1];
515 let mut vidx = frm.offset[2];
516 let ystride = frm.stride[0];
517 let ustride = frm.stride[1];
518 let vstride = frm.stride[2];
519
520 for (mb_y, (f_row, mb_row)) in self.features.chunks(self.mb_w).zip(self.recon.chunks(self.mb_w)).enumerate() {
521 for (mb_x, (&feature, sblk)) in f_row.iter().zip(mb_row.iter()).enumerate() {
522 let dst = &mut frm.data[yidx + mb_x * 16..];
523 for (dst, src) in dst.chunks_mut(ystride).zip(sblk.luma.chunks(16)) {
524 dst[..16].copy_from_slice(src);
525 }
526 let dst = &mut frm.data[uidx + mb_x * 8..];
527 for (dst, src) in dst.chunks_mut(ustride).zip(sblk.chroma[0].chunks(8)) {
528 dst[..8].copy_from_slice(src);
529 }
530 let dst = &mut frm.data[vidx + mb_x * 8..];
531 for (dst, src) in dst.chunks_mut(vstride).zip(sblk.chroma[1].chunks(8)) {
532 dst[..8].copy_from_slice(src);
533 }
534
535 let loop_str = if feature != 2 {
536 self.loop_params.loop_filter_level
537 } else { 0 }; //todo
538 loop_filter_mb(frm, mb_x, mb_y, loop_str, &self.loop_params, is_intra);
539 }
540 yidx += ystride * 16;
541 uidx += ustride * 8;
542 vidx += vstride * 8;
543 }
544 }
545 }
546
547 fn loop_filter_mb(dframe: &mut NASimpleVideoFrame<u8>, mb_x: usize, mb_y: usize, loop_str: u8, loop_params: &LoopParams, is_intra: bool) {
548 let edge_thr = i16::from(loop_str) + 2;
549 let luma_thr = i16::from(loop_str);
550 let chroma_thr = i16::from(loop_str) * 2;
551 let inner_thr = if loop_params.loop_sharpness == 0 {
552 i16::from(loop_str)
553 } else {
554 let bound1 = i16::from(9 - loop_params.loop_sharpness);
555 let shift = (loop_params.loop_sharpness + 3) >> 2;
556 (i16::from(loop_str) >> shift).min(bound1)
557 };
558 let hev_thr = i16::from(HIGH_EDGE_VAR_THR[if is_intra { 1 } else { 0 }][loop_str as usize]);
559
560 let ystride = dframe.stride[0];
561 let ustride = dframe.stride[1];
562 let vstride = dframe.stride[2];
563 let ypos = dframe.offset[0] + mb_x * 16 + mb_y * 16 * ystride;
564 let upos = dframe.offset[1] + mb_x * 8 + mb_y * 8 * ustride;
565 let vpos = dframe.offset[2] + mb_x * 8 + mb_y * 8 * vstride;
566
567 let (loop_edge, loop_inner) = if loop_params.lf_simple {
568 (simple_loop_filter as LoopFilterFunc, simple_loop_filter as LoopFilterFunc)
569 } else {
570 (normal_loop_filter_edge as LoopFilterFunc, normal_loop_filter_inner as LoopFilterFunc)
571 };
572
573 if mb_x > 0 {
574 loop_edge(dframe.data, ypos, 1, ystride, 16, edge_thr, inner_thr, hev_thr);
575 loop_edge(dframe.data, upos, 1, ustride, 8, edge_thr, inner_thr, hev_thr);
576 loop_edge(dframe.data, vpos, 1, vstride, 8, edge_thr, inner_thr, hev_thr);
577 }
578 if mb_y > 0 {
579 loop_edge(dframe.data, ypos, ystride, 1, 16, edge_thr, inner_thr, hev_thr);
580 loop_edge(dframe.data, upos, ustride, 1, 8, edge_thr, inner_thr, hev_thr);
581 loop_edge(dframe.data, vpos, vstride, 1, 8, edge_thr, inner_thr, hev_thr);
582 }
583
584 for y in 1..4 {
585 loop_inner(dframe.data, ypos + y * 4 * ystride, ystride, 1, 16, luma_thr, inner_thr, hev_thr);
586 }
587 loop_inner(dframe.data, upos + 4 * ustride, ustride, 1, 8, chroma_thr, inner_thr, hev_thr);
588 loop_inner(dframe.data, vpos + 4 * vstride, vstride, 1, 8, chroma_thr, inner_thr, hev_thr);
589
590 for x in 1..4 {
591 loop_inner(dframe.data, ypos + x * 4, 1, ystride, 16, luma_thr, inner_thr, hev_thr);
592 }
593 loop_inner(dframe.data, upos + 4, 1, ustride, 8, chroma_thr, inner_thr, hev_thr);
594 loop_inner(dframe.data, vpos + 4, 1, vstride, 8, chroma_thr, inner_thr, hev_thr);
595 }