// nihav-itu/src/codecs/h264/types.rs
use nihav_codec_support::codecs::{MV, ZERO_MV};
use nihav_codec_support::data::GenericCache;
use super::FrameRefs;

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum BMode {
    L0,
    L1,
    Bi,
}

#[derive(Clone,Copy,Debug,PartialEq)]
pub enum MBType {
    Intra4x4,
    Intra8x8,
    Intra16x16(u8, u8, u8),
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16(BMode),
    B16x8(BMode, BMode),
    B8x16(BMode, BMode),
    B8x8,
    BSkip,
}

impl MBType {
    pub fn is_intra(self) -> bool {
        matches!(self, MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM)
    }
    pub fn is_intra16x16(self) -> bool {
        matches!(self, MBType::Intra16x16(_, _, _))
    }
    pub fn is_skip(self) -> bool {
        matches!(self, MBType::PSkip | MBType::BSkip)
    }
    pub fn is_4x4(self) -> bool { self.num_parts() == 4 }
    pub fn is_l0(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L0,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 { mode0 == BMode::L0 } else { mode1 == BMode::L0 }
            },
            MBType::Direct | MBType::BSkip => false,
            _ => true,
        }
    }
    pub fn is_l1(self, part: usize) -> bool {
        match self {
            MBType::B16x16(mode) => mode == BMode::L1,
            MBType::B16x8(mode0, mode1) | MBType::B8x16(mode0, mode1) => {
                if part == 0 { mode0 == BMode::L1 } else { mode1 == BMode::L1 }
            },
            _ => false,
        }
    }
    pub fn num_parts(self) -> usize {
        match self {
            MBType::Intra4x4 | MBType::Intra8x8 | MBType::Intra16x16(_, _, _) | MBType::PCM |
            MBType::PSkip |
            MBType::Direct | MBType::BSkip |
            MBType::P16x16 |
            MBType::B16x16(_)
                => 1,
            MBType::P16x8 | MBType::P8x16 |
            MBType::B16x8(_, _) | MBType::B8x16(_, _)
                => 2,
            _ => 4,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            MBType::Intra4x4 |
            MBType::Intra8x8 |
            MBType::Intra16x16(_, _, _) |
            MBType::PCM |
            MBType::P16x16 |
            MBType::PSkip |
            MBType::Direct |
            MBType::B16x16(_) |
            MBType::BSkip
                => (16, 16),
            MBType::P16x8 | MBType::B16x8(_, _) => (16, 8),
            MBType::P8x16 | MBType::B8x16(_, _) => (8, 16),
            _ => (8, 8),
        }
    }
}
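
// Illustrative sanity checks (an added sketch, not part of the original
// source): they pin down the partition-count and size conventions above.
#[cfg(test)]
mod mb_type_sanity {
    use super::*;

    #[test]
    fn mb_type_parts_and_sizes() {
        assert_eq!(MBType::P16x16.num_parts(), 1);
        assert_eq!(MBType::B16x8(BMode::Bi, BMode::L0).num_parts(), 2);
        assert_eq!(MBType::P8x8.num_parts(), 4);
        assert!(MBType::P8x8.is_4x4());
        assert_eq!(MBType::P8x16.size(), (8, 16));
        // Direct and BSkip report neither list explicitly.
        assert!(!MBType::BSkip.is_l0(0) && !MBType::BSkip.is_l1(0));
    }
}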

impl Default for MBType {
    fn default() -> Self { MBType::Intra4x4 }
}

#[derive(Clone,Copy,Debug,PartialEq)]
pub enum SubMBType {
    P8x8,
    P8x4,
    P4x8,
    P4x4,
    Direct8x8,
    B8x8(BMode),
    B8x4(BMode),
    B4x8(BMode),
    B4x4(BMode),
}

impl SubMBType {
    pub fn num_parts(self) -> usize {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => 1,
            SubMBType::P4x4 | SubMBType::B4x4(_) => 4,
            _ => 2,
        }
    }
    pub fn size(self) -> (usize, usize) {
        match self {
            SubMBType::P8x8 | SubMBType::Direct8x8 | SubMBType::B8x8(_) => (8, 8),
            SubMBType::P8x4 | SubMBType::B8x4(_) => (8, 4),
            SubMBType::P4x8 | SubMBType::B4x8(_) => (4, 8),
            SubMBType::P4x4 | SubMBType::B4x4(_) => (4, 4),
        }
    }
    pub fn is_l0(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => mode == BMode::L0,
            _ => true,
        }
    }
    pub fn is_l1(self) -> bool {
        match self {
            SubMBType::B8x8(mode) | SubMBType::B8x4(mode) |
            SubMBType::B4x8(mode) | SubMBType::B4x4(mode) => mode == BMode::L1,
            _ => false,
        }
    }
}

impl Default for SubMBType {
    fn default() -> Self { SubMBType::Direct8x8 }
}

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum CompactMBType {
    Intra4x4,
    Intra8x8,
    Intra16x16,
    PCM,

    P16x16,
    P16x8,
    P8x16,
    P8x8,
    P8x8Ref0,
    PSkip,

    Direct,
    B16x16,
    B16x8,
    B8x16,
    B8x8,
    BSkip,

    None,
}

impl CompactMBType {
    pub fn is_intra(self) -> bool {
        matches!(self, CompactMBType::Intra4x4 | CompactMBType::Intra8x8 | CompactMBType::Intra16x16)
    }
    pub fn is_intra16orpcm(self) -> bool {
        matches!(self, CompactMBType::Intra16x16 | CompactMBType::PCM)
    }
    pub fn is_skip(self) -> bool {
        matches!(self, CompactMBType::PSkip | CompactMBType::BSkip)
    }
    pub fn is_direct(self) -> bool {
        matches!(self, CompactMBType::BSkip | CompactMBType::Direct | CompactMBType::None)
    }
    pub fn is_inter(self) -> bool {
        !self.is_intra() && !self.is_skip() && self != CompactMBType::PCM
    }
    pub fn is_16x16(self) -> bool {
        !matches!(self,
            CompactMBType::P16x8 | CompactMBType::P8x16 |
            CompactMBType::P8x8 | CompactMBType::P8x8Ref0 |
            CompactMBType::B16x8 | CompactMBType::B8x16 |
            CompactMBType::B8x8)
    }
}

impl Default for CompactMBType {
    fn default() -> Self { CompactMBType::None }
}

impl From<MBType> for CompactMBType {
    fn from(mbtype: MBType) -> Self {
        match mbtype {
            MBType::Intra4x4 => CompactMBType::Intra4x4,
            MBType::Intra8x8 => CompactMBType::Intra8x8,
            MBType::Intra16x16(_, _, _) => CompactMBType::Intra16x16,
            MBType::PCM => CompactMBType::PCM,
            MBType::P16x16 => CompactMBType::P16x16,
            MBType::P16x8 => CompactMBType::P16x8,
            MBType::P8x16 => CompactMBType::P8x16,
            MBType::P8x8 => CompactMBType::P8x8,
            MBType::P8x8Ref0 => CompactMBType::P8x8Ref0,
            MBType::PSkip => CompactMBType::PSkip,
            MBType::Direct => CompactMBType::Direct,
            MBType::B16x16(_) => CompactMBType::B16x16,
            MBType::B16x8(_, _) => CompactMBType::B16x8,
            MBType::B8x16(_, _) => CompactMBType::B8x16,
            MBType::B8x8 => CompactMBType::B8x8,
            MBType::BSkip => CompactMBType::BSkip,
        }
    }
}
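
// Another illustrative check (added here, not in the original source): the
// compact form drops per-partition details but keeps the macroblock class.
#[cfg(test)]
mod compact_mb_type_sanity {
    use super::*;

    #[test]
    fn compact_conversion_keeps_class() {
        let cmbt = CompactMBType::from(MBType::B16x8(BMode::L0, BMode::Bi));
        assert_eq!(cmbt, CompactMBType::B16x8);
        assert!(cmbt.is_inter() && !cmbt.is_16x16());
        assert!(CompactMBType::from(MBType::BSkip).is_direct());
    }
}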

#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum IntraPredMode {
    Vertical,
    Horizontal,
    DC,
    DiagDownLeft,
    DiagDownRight,
    VerRight,
    HorDown,
    VerLeft,
    HorUp,
    None,
}

impl IntraPredMode {
    pub fn is_none(self) -> bool { self == IntraPredMode::None }
    pub fn into_pred_idx(self) -> i8 {
        if !self.is_none() {
            self as u8 as i8
        } else {
            -1
        }
    }
}

impl Default for IntraPredMode {
    fn default() -> Self { IntraPredMode::None }
}

impl From<u8> for IntraPredMode {
    fn from(val: u8) -> Self {
        match val {
            0 => IntraPredMode::Vertical,
            1 => IntraPredMode::Horizontal,
            2 => IntraPredMode::DC,
            3 => IntraPredMode::DiagDownLeft,
            4 => IntraPredMode::DiagDownRight,
            5 => IntraPredMode::VerRight,
            6 => IntraPredMode::HorDown,
            7 => IntraPredMode::VerLeft,
            8 => IntraPredMode::HorUp,
            _ => IntraPredMode::None,
        }
    }
}

impl From<IntraPredMode> for u8 {
    fn from(val: IntraPredMode) -> Self {
        match val {
            IntraPredMode::Vertical => 0,
            IntraPredMode::Horizontal => 1,
            IntraPredMode::DC => 2,
            IntraPredMode::DiagDownLeft => 3,
            IntraPredMode::DiagDownRight => 4,
            IntraPredMode::VerRight => 5,
            IntraPredMode::HorDown => 6,
            IntraPredMode::VerLeft => 7,
            IntraPredMode::HorUp => 8,
            _ => 9,
        }
    }
}
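
// A small round-trip sketch (illustrative addition): u8 -> IntraPredMode -> u8
// is the identity for the nine valid modes, and anything else maps to None.
#[cfg(test)]
mod ipred_mode_sanity {
    use super::*;

    #[test]
    fn ipred_mode_round_trip() {
        for v in 0u8..9 {
            let mode = IntraPredMode::from(v);
            assert!(!mode.is_none());
            assert_eq!(u8::from(mode), v);
            assert_eq!(mode.into_pred_idx(), v as i8);
        }
        assert!(IntraPredMode::from(42).is_none());
        assert_eq!(IntraPredMode::None.into_pred_idx(), -1);
    }
}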

pub const MISSING_POC: u16 = 0xFFFF;

#[derive(Clone,Copy,Debug)]
pub struct PicRef {
    ref_idx: u8
}

pub const MISSING_REF: PicRef = PicRef { ref_idx: 0xFF };
pub const INVALID_REF: PicRef = PicRef { ref_idx: 0xFE };
pub const ZERO_REF: PicRef = PicRef { ref_idx: 0 };
const DIRECT_FLAG: u8 = 0x40;

impl PicRef {
    pub fn new(ref_idx: u8) -> Self {
        Self { ref_idx }
    }
    pub fn not_avail(self) -> bool {
        self == MISSING_REF || self == INVALID_REF
    }
    pub fn index(self) -> usize { (self.ref_idx & !DIRECT_FLAG) as usize }
    pub fn is_direct(self) -> bool { (self.ref_idx & DIRECT_FLAG) != 0 }
    pub fn set_direct(&mut self) { self.ref_idx |= DIRECT_FLAG; }
    fn min_pos(self, other: Self) -> Self {
        match (self.not_avail(), other.not_avail()) {
            (true, true) => self,
            (false, true) => self,
            (true, false) => other,
            (false, false) => PicRef::new((self.ref_idx & !DIRECT_FLAG).min(other.ref_idx & !DIRECT_FLAG)),
        }
    }
}

impl Default for PicRef {
    fn default() -> Self { MISSING_REF }
}

impl PartialEq for PicRef {
    fn eq(&self, other: &Self) -> bool {
        (self.ref_idx | DIRECT_FLAG) == (other.ref_idx | DIRECT_FLAG)
    }
}

impl std::fmt::Display for PicRef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if *self == MISSING_REF {
            write!(f, "-1")
        } else if *self == INVALID_REF {
            write!(f, "-2")
        } else {
            write!(f, "{}", self.ref_idx & !DIRECT_FLAG)
        }
    }
}
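
// Illustrative behaviour notes as tests (added, not original): PicRef keeps
// the reference index in the low bits plus DIRECT_FLAG in bit 6, and equality
// deliberately ignores the direct flag.
#[cfg(test)]
mod pic_ref_sanity {
    use super::*;

    #[test]
    fn direct_flag_is_ignored_by_eq() {
        let plain = PicRef::new(3);
        let mut direct = PicRef::new(3);
        direct.set_direct();
        assert!(direct.is_direct() && !plain.is_direct());
        assert_eq!(plain, direct);     // eq masks out DIRECT_FLAG
        assert_eq!(direct.index(), 3); // index masks it out too
        assert!(MISSING_REF.not_avail() && INVALID_REF.not_avail());
        assert!(!ZERO_REF.not_avail());
    }
}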

#[derive(Clone,Copy,Default)]
pub struct MBData {
    pub mb_type: CompactMBType,
    pub cbp: u8,
    pub coded_flags: u32,
    pub cmode: u8,
    pub qp_y: u8,
    pub qp_u: u8,
    pub qp_v: u8,
    pub transform_8x8: bool,
}

pub fn blk4_to_blk8(blk4: usize) -> usize {
    const MAP: [usize; 16] = [ 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3 ];
    MAP[blk4 & 0xF]
}
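
// Quick check of the 4x4 -> 8x8 index mapping (illustrative addition): the 16
// luma 4x4 blocks in raster order collapse onto the four 8x8 blocks.
#[cfg(test)]
mod blk_map_sanity {
    use super::*;

    #[test]
    fn blk4_to_blk8_quadrants() {
        // Top-left quadrant: raster blk4 indices 0, 1, 4, 5.
        for &blk4 in &[0usize, 1, 4, 5] {
            assert_eq!(blk4_to_blk8(blk4), 0);
        }
        // Bottom-right quadrant: raster blk4 indices 10, 11, 14, 15.
        for &blk4 in &[10usize, 11, 14, 15] {
            assert_eq!(blk4_to_blk8(blk4), 3);
        }
    }
}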

#[derive(Clone,Copy)]
pub struct Blk8Data {
    pub ref_idx: [PicRef; 2],
    pub ncoded_c: [u8; 2],
}

impl Default for Blk8Data {
    fn default() -> Self {
        Self {
            ref_idx: [MISSING_REF; 2],
            ncoded_c: [0; 2],
        }
    }
}

#[derive(Clone,Copy,Default)]
pub struct Blk4Data {
    pub ncoded: u8,
    pub ipred: IntraPredMode,
    pub mv: [MV; 2],
    pub mvd: [MV; 2],
}

pub struct SliceState {
    pub mb_x: usize,
    pub mb_y: usize,
    pub mb_w: usize,
    pub mb_h: usize,
    pub mb_start: usize,

    pub mb: GenericCache<MBData>,
    pub blk8: GenericCache<Blk8Data>,
    pub blk4: GenericCache<Blk4Data>,

    pub deblock: GenericCache<u8>,

    pub has_top: bool,
    pub has_left: bool,
}

impl SliceState {
    pub fn new() -> Self {
        Self {
            mb_x: 0,
            mb_y: 0,
            mb_w: 0,
            mb_h: 0,
            mb_start: 0,
            mb: GenericCache::new(0, 0, MBData::default()),
            blk8: GenericCache::new(0, 0, Blk8Data::default()),
            blk4: GenericCache::new(0, 0, Blk4Data::default()),

            deblock: GenericCache::new(0, 0, 0),

            has_top: false,
            has_left: false,
        }
    }
    pub fn reset(&mut self, mb_w: usize, mb_h: usize, mb_pos: usize) {
        self.mb_w = mb_w;
        self.mb_h = mb_h;
        self.mb_start = mb_pos;
        if mb_w > 0 {
            self.mb_x = mb_pos % mb_w;
            self.mb_y = mb_pos / mb_w;
        } else {
            self.mb_x = 0;
            self.mb_y = 0;
        }
        self.mb = GenericCache::new(1, mb_w + 2, MBData::default());
        self.blk8 = GenericCache::new(2, mb_w * 2 + 2, Blk8Data::default());
        self.blk4 = GenericCache::new(4, mb_w * 4 + 2, Blk4Data::default());

        self.deblock = GenericCache::new(4, mb_w * 4 + 1, 0);

        self.has_top = false;
        self.has_left = false;
    }
    pub fn fill_deblock(&mut self, deblock_mode: u8, is_s: bool) {
        if deblock_mode == 1 {
            return;
        }

        let tx8x8 = self.get_cur_mb().transform_8x8;

        let mut idx = self.deblock.xpos + self.mb_x * 4;
        let cur_mbt = self.get_cur_mb().mb_type;
        let left_mbt = self.get_left_mb().mb_type;
        let mut top_mbt = self.get_top_mb().mb_type;
        for y in 0..4 {
            if tx8x8 && (y & 1) != 0 {
                continue;
            }
            let can_do_top = y != 0 || (self.mb_y != 0 && (self.has_top || deblock_mode != 2));
            if can_do_top {
                // Horizontal edges: strength is stored in the high nibble,
                // comparing each 4x4 block against its top neighbour.
                if is_s || cur_mbt.is_intra() || top_mbt.is_intra() {
                    let val = if y == 0 { 0x40 } else { 0x30 };
                    for el in self.deblock.data[idx..][..4].iter_mut() { *el |= val; }
                } else {
                    for x in 0..4 {
                        let blk4 = x + y * 4;
                        let blk8 = x / 2 + (y / 2) * 2;
                        if self.get_cur_blk4(blk4).ncoded != 0 || self.get_top_blk4(blk4).ncoded != 0 {
                            self.deblock.data[idx + x] |= 0x20;
                        } else {
                            let cur_mv = self.get_cur_blk4(blk4).mv;
                            let top_mv = self.get_top_blk4(blk4).mv;
                            let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                            let top_ref = self.get_top_blk8(blk8).ref_idx;
                            if mvdiff4(cur_mv[0], top_mv[0]) || mvdiff4(cur_mv[1], top_mv[1]) || cur_ref != top_ref {
                                self.deblock.data[idx + x] |= 0x10;
                            }
                        }
                    }
                }
            }
            let mut lleft_mbt = left_mbt;
            for x in 0..4 {
                if tx8x8 && (x & 1) != 0 {
                    continue;
                }
                let can_do_left = x > 0 || self.has_left || (self.mb_x != 0 && deblock_mode != 2);
                if !can_do_left {
                    continue;
                }
                let blk4 = x + y * 4;
                let blk8 = x / 2 + (y / 2) * 2;
                // Vertical edges: strength is stored in the low nibble,
                // comparing each 4x4 block against its left neighbour.
                if is_s || cur_mbt.is_intra() || lleft_mbt.is_intra() {
                    self.deblock.data[idx + x] |= if x == 0 { 4 } else { 3 };
                } else if self.get_cur_blk4(blk4).ncoded != 0 || self.get_left_blk4(blk4).ncoded != 0 {
                    self.deblock.data[idx + x] |= 2;
                } else {
                    let cur_mv = self.get_cur_blk4(blk4).mv;
                    let left_mv = self.get_left_blk4(blk4).mv;
                    let cur_ref = self.get_cur_blk8(blk8).ref_idx;
                    let left_ref = self.get_left_blk8(blk8).ref_idx;
                    if mvdiff4(cur_mv[0], left_mv[0]) || mvdiff4(cur_mv[1], left_mv[1]) || cur_ref != left_ref {
                        self.deblock.data[idx + x] |= 1;
                    }
                }
                lleft_mbt = cur_mbt;
            }
            top_mbt = cur_mbt;
            idx += self.deblock.stride;
        }
    }
    pub fn next_mb(&mut self) {
        self.mb_x += 1;
        self.has_left = true;
        if self.mb_x == self.mb_w {
            self.mb_x = 0;
            self.mb_y += 1;
            self.mb.update_row();
            self.blk8.update_row();
            self.blk4.update_row();

            self.deblock.update_row();

            self.has_left = false;
        }
        self.has_top = self.mb_x + self.mb_y * self.mb_w >= self.mb_start + self.mb_w;
    }
    pub fn get_cur_mb_idx(&self) -> usize { self.mb.xpos + self.mb_x }
    pub fn get_cur_blk8_idx(&self, blk_no: usize) -> usize {
        self.blk8.xpos + self.mb_x * 2 + (blk_no & 1) + (blk_no >> 1) * self.blk8.stride
    }
    pub fn get_cur_blk4_idx(&self, blk_no: usize) -> usize {
        self.blk4.xpos + self.mb_x * 4 + (blk_no & 3) + (blk_no >> 2) * self.blk4.stride
    }
    pub fn get_cur_mb(&mut self) -> &mut MBData {
        let idx = self.get_cur_mb_idx();
        &mut self.mb.data[idx]
    }
    pub fn get_left_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - 1]
    }
    pub fn get_top_mb(&self) -> &MBData {
        &self.mb.data[self.get_cur_mb_idx() - self.mb.stride]
    }
    pub fn get_cur_blk8(&mut self, blk_no: usize) -> &mut Blk8Data {
        let idx = self.get_cur_blk8_idx(blk_no);
        &mut self.blk8.data[idx]
    }
    pub fn get_left_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - 1]
    }
    pub fn get_top_blk8(&self, blk_no: usize) -> &Blk8Data {
        &self.blk8.data[self.get_cur_blk8_idx(blk_no) - self.blk8.stride]
    }
    pub fn get_cur_blk4(&mut self, blk_no: usize) -> &mut Blk4Data {
        let idx = self.get_cur_blk4_idx(blk_no);
        &mut self.blk4.data[idx]
    }
    pub fn get_left_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - 1]
    }
    pub fn get_top_blk4(&self, blk_no: usize) -> &Blk4Data {
        &self.blk4.data[self.get_cur_blk4_idx(blk_no) - self.blk4.stride]
    }

    pub fn apply_to_blk8<F: Fn(&mut Blk8Data)>(&mut self, f: F) {
        let start = self.get_cur_blk8_idx(0);
        for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(2) {
            for el in row[..2].iter_mut() {
                f(el);
            }
        }
    }
    pub fn apply_to_blk4<F: Fn(&mut Blk4Data)>(&mut self, f: F) {
        let start = self.get_cur_blk4_idx(0);
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(4) {
            for el in row[..4].iter_mut() {
                f(el);
            }
        }
    }

    pub fn fill_ipred(&mut self, imode: IntraPredMode) {
        self.apply_to_blk4(|blk| blk.ipred = imode);
    }
    pub fn fill_ncoded(&mut self, nc: u8) {
        self.apply_to_blk4(|blk| blk.ncoded = nc);
        self.apply_to_blk8(|blk| blk.ncoded_c = [nc; 2]);
    }
    pub fn reset_mb_mv(&mut self) {
        self.apply_to_blk8(|blk| blk.ref_idx = [INVALID_REF; 2]);
    }

    pub fn get_mv_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> (usize, usize) {
        // yoff is a multiple of four, so xoff / 4 + (yoff / 4) * 4 == xoff / 4 + yoff.
        let blk_no = xoff / 4 + yoff;
        let mv_a = self.get_left_blk4(blk_no).mvd[ref_l];
        let mv_b = self.get_top_blk4(blk_no).mvd[ref_l];
        let mv = mv_a + mv_b;
        let ctx0 = if mv.x < 3 { 0 } else if mv.x <= 32 { 1 } else { 2 };
        let ctx1 = if mv.y < 3 { 0 } else if mv.y <= 32 { 1 } else { 2 };
        (ctx0, ctx1)
    }
    pub fn get_mv_ref_ctx(&self, xoff: usize, yoff: usize, ref_l: usize) -> usize {
        let blk_no = xoff / 8 + (yoff / 8) * 2;
        let mut ctx = 0;
        let left_ref = self.get_left_blk8(blk_no).ref_idx[ref_l];
        let top_ref = self.get_top_blk8(blk_no).ref_idx[ref_l];
        if !left_ref.not_avail() && !left_ref.is_direct() && left_ref.index() > 0 {
            ctx += 1;
        }
        if !top_ref.not_avail() && !top_ref.is_direct() && top_ref.index() > 0 {
            ctx += 2;
        }
        ctx
    }
    #[allow(clippy::if_same_then_else)]
    pub fn predict(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, diff_mv: MV, ref_idx: PicRef) {
        let midx = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        let ridx = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        let ridx_c = self.get_cur_blk8_idx(0) + (xpos + bw) / 8 + ypos / 8 * self.blk8.stride - if (ypos & 4) == 0 { self.blk8.stride } else { 0 };

        let mv_a = self.blk4.data[midx - 1].mv[ref_l];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[ref_l];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + bw / 4].mv[ref_l];

        let rx = if (xpos & 4) != 0 { 0 } else { 1 };
        let ry = if (ypos & 4) != 0 { 0 } else { self.blk8.stride };
        let ref_a = self.blk8.data[ridx - rx].ref_idx[ref_l];
        let ref_b = self.blk8.data[ridx - ry].ref_idx[ref_l];
        let mut ref_c = self.blk8.data[ridx_c].ref_idx[ref_l];

        // If the top-right candidate C is unavailable, fall back to the
        // top-left candidate D.
        if ref_c == MISSING_REF || (((xpos + bw) & 4) == 0 && (ypos & 4) != 0) {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[ref_l];
            ref_c = self.blk8.data[ridx - rx - ry].ref_idx[ref_l];
        }

        let pred_mv = if bw == 16 && bh == 8 && ypos == 0 && ref_b == ref_idx {
            mv_b
        } else if bw == 16 && bh == 8 && ypos != 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos == 0 && ref_a == ref_idx {
            mv_a
        } else if bw == 8 && bh == 16 && xpos != 0 && ref_c == ref_idx {
            mv_c
        } else if ref_b == MISSING_REF && ref_c == MISSING_REF {
            mv_a
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        let mv = pred_mv + diff_mv;
        self.fill_mv (xpos, ypos, bw, bh, ref_l, mv);
        self.fill_ref(xpos, ypos, bw, bh, ref_l, ref_idx);
    }
    pub fn predict_pskip(&mut self) {
        let midx = self.get_cur_blk4_idx(0);
        let ridx = self.get_cur_blk8_idx(0);

        let mv_a = self.blk4.data[midx - 1].mv[0];
        let mv_b = self.blk4.data[midx - self.blk4.stride].mv[0];
        let mut mv_c = self.blk4.data[midx - self.blk4.stride + 4].mv[0];

        let ref_a = self.blk8.data[ridx - 1].ref_idx[0];
        let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx[0];
        let mut ref_c = self.blk8.data[ridx - self.blk8.stride + 2].ref_idx[0];

        if ref_c == MISSING_REF {
            mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv[0];
            ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx[0];
        }

        let ref_idx = ZERO_REF;
        let mv = if ref_a == MISSING_REF || ref_b == MISSING_REF || (ref_a == ZERO_REF && mv_a == ZERO_MV) || (ref_b == ZERO_REF && mv_b == ZERO_MV) {
            ZERO_MV
        } else {
            let count = ((ref_a == ref_idx) as u8) + ((ref_b == ref_idx) as u8) + ((ref_c == ref_idx) as u8);
            if count == 1 {
                if ref_a == ref_idx {
                    mv_a
                } else if ref_b == ref_idx {
                    mv_b
                } else {
                    mv_c
                }
            } else {
                MV::pred(mv_a, mv_b, mv_c)
            }
        };

        self.fill_mv (0, 0, 16, 16, 0, mv);
        self.fill_ref(0, 0, 16, 16, 0, ref_idx);
    }
    pub fn predict_direct_mb(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16) {
        let (col_mb, _, _) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        if col_mb.mb_type.is_16x16() || !temporal_mv {
            let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, 0);
            self.apply_to_blk4(|blk4| blk4.mv = [mv0, mv1]);
            self.apply_to_blk8(|blk8| blk8.ref_idx = [ref0, ref1]);
        } else {
            for blk4 in 0..16 {
                let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
                self.get_cur_blk4(blk4).mv = [mv0, mv1];
                self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
            }
        }
    }
    pub fn predict_direct_sub(&mut self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) {
        let (mv0, ref0, mv1, ref1) = self.get_direct_mv(frame_refs, temporal_mv, cur_id, blk4);
        self.get_cur_blk4(blk4).mv = [mv0, mv1];
        self.get_cur_blk8(blk4_to_blk8(blk4)).ref_idx = [ref0, ref1];
    }
    pub fn get_direct_mv(&self, frame_refs: &FrameRefs, temporal_mv: bool, cur_id: u16, blk4: usize) -> (MV, PicRef, MV, PicRef) {
        let (mbi, r1_poc, r1_long) = frame_refs.get_colocated_info(self.mb_x, self.mb_y);
        let blk8 = blk4_to_blk8(blk4);
        let (col_mv, r0_poc, col_idx) = if mbi.ref_poc[blk8] == [MISSING_POC; 2] {
            (ZERO_MV, MISSING_POC, MISSING_REF)
        } else if mbi.ref_poc[blk8][0] != MISSING_POC {
            (mbi.mv[blk4][0], mbi.ref_poc[blk8][0], mbi.ref_idx[blk8][0])
        } else {
            (mbi.mv[blk4][1], mbi.ref_poc[blk8][1], mbi.ref_idx[blk8][1])
        };
        let (col_ref, r0_long) = frame_refs.map_ref0(r0_poc);
        if temporal_mv {
            let td = (i32::from(r1_poc) - i32::from(r0_poc)).clamp(-128, 127);
            if r0_long || td == 0 {
                (col_mv, col_ref, ZERO_MV, ZERO_REF)
            } else {
                let tx = (16384 + (td / 2).abs()) / td;
                let tb = (i32::from(cur_id) - i32::from(r0_poc)).clamp(-128, 127);
                let scale = ((tb * tx + 32) >> 6).clamp(-1024, 1023);
                let mv0 = MV {
                    x: ((i32::from(col_mv.x) * scale + 128) >> 8) as i16,
                    y: ((i32::from(col_mv.y) * scale + 128) >> 8) as i16,
                };
                let mv1 = mv0 - col_mv;
                (mv0, col_ref, mv1, ZERO_REF)
            }
        } else {
            let blk4 = 0; // we generate the same MV prediction for the whole MB
            let blk8 = blk4_to_blk8(blk4);
            let midx = self.get_cur_blk4_idx(blk4);
            let ridx = self.get_cur_blk8_idx(blk8);
            let ridx_c = self.get_cur_blk8_idx(blk8) + 16 / 8 - self.blk8.stride;

            let mv_a = self.blk4.data[midx - 1].mv;
            let mv_b = self.blk4.data[midx - self.blk4.stride].mv;
            let mut mv_c = self.blk4.data[midx - self.blk4.stride + 16 / 4].mv;

            let ref_a = self.blk8.data[ridx - 1].ref_idx;
            let ref_b = self.blk8.data[ridx - self.blk8.stride].ref_idx;
            let mut ref_c = self.blk8.data[ridx_c].ref_idx;

            if ref_c == [MISSING_REF; 2] {
                mv_c = self.blk4.data[midx - self.blk4.stride - 1].mv;
                ref_c = self.blk8.data[ridx - self.blk8.stride - 1].ref_idx;
            }
            let mut refs = [INVALID_REF; 2];
            for cur_ref in [ref_a, ref_b, ref_c].iter() {
                refs[0] = refs[0].min_pos(cur_ref[0]);
                refs[1] = refs[1].min_pos(cur_ref[1]);
            }
            if refs == [INVALID_REF; 2] {
                return (ZERO_MV, ZERO_REF, ZERO_MV, ZERO_REF);
            }

            let mut col_zero = true;
            if r1_long || col_idx != ZERO_REF {
                col_zero = false;
            }
            if col_mv.x.abs() > 1 || col_mv.y.abs() > 1 {
                col_zero = false;
            }
            let mut mvs = [ZERO_MV; 2];
            for ref_l in 0..2 {
                if mbi.mb_type.is_intra() || (!refs[ref_l].not_avail() && !(refs[ref_l] == ZERO_REF && col_zero)) {
                    let ref_idx = refs[ref_l];
                    mvs[ref_l] = if ref_b[ref_l] == MISSING_REF && ref_c[ref_l] == MISSING_REF {
                        mv_a[ref_l]
                    } else {
                        let count = ((ref_a[ref_l] == ref_idx) as u8) + ((ref_b[ref_l] == ref_idx) as u8) + ((ref_c[ref_l] == ref_idx) as u8);
                        if count == 1 {
                            if ref_a[ref_l] == ref_idx {
                                mv_a[ref_l]
                            } else if ref_b[ref_l] == ref_idx {
                                mv_b[ref_l]
                            } else {
                                mv_c[ref_l]
                            }
                        } else {
                            MV::pred(mv_a[ref_l], mv_b[ref_l], mv_c[ref_l])
                        }
                    };
                }
            }
            (mvs[0], refs[0], mvs[1], refs[1])
        }
    }
    pub fn fill_mv(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mv[ref_l] = mv;
            }
        }
    }
    pub fn fill_mvd(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, mv: MV) {
        let mvd = MV{ x: mv.x.abs().min(128), y: mv.y.abs().min(128) };
        let start = self.get_cur_blk4_idx(0) + xpos / 4 + ypos / 4 * self.blk4.stride;
        for row in self.blk4.data[start..].chunks_mut(self.blk4.stride).take(bh / 4) {
            for blk in row[..bw / 4].iter_mut() {
                blk.mvd[ref_l] = mvd;
            }
        }
    }
    pub fn fill_ref(&mut self, xpos: usize, ypos: usize, bw: usize, bh: usize, ref_l: usize, ref_idx: PicRef) {
        let start = self.get_cur_blk8_idx(0) + xpos / 8 + ypos / 8 * self.blk8.stride;
        if bw < 8 || bh < 8 {
            self.blk8.data[start].ref_idx[ref_l] = ref_idx;
        } else {
            for row in self.blk8.data[start..].chunks_mut(self.blk8.stride).take(bh / 8) {
                for blk in row[..bw / 8].iter_mut() {
                    blk.ref_idx[ref_l] = ref_idx;
                }
            }
        }
    }
}
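
// An illustrative walk over a tiny 2x2-macroblock slice (an added sketch, not
// original code): has_top/has_left become true once a decoded neighbour
// exists inside the current slice.
#[cfg(test)]
mod slice_state_sanity {
    use super::*;

    #[test]
    fn neighbour_flags_follow_the_walk() {
        let mut ss = SliceState::new();
        ss.reset(2, 2, 0);
        assert!(!ss.has_top && !ss.has_left);
        ss.next_mb(); // now at mb (1, 0)
        assert!(ss.has_left && !ss.has_top);
        ss.next_mb(); // wraps to mb (0, 1)
        assert!(!ss.has_left && ss.has_top);
    }
}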

fn mvdiff4(mv1: MV, mv2: MV) -> bool {
    let mv = mv1 - mv2;
    (mv.x.abs() >= 4) || (mv.y.abs() >= 4)
}
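
// mvdiff4() flags a difference of one luma pel or more (four quarter-pel
// units) in either component; an illustrative check of the boundary:
#[cfg(test)]
mod mvdiff_sanity {
    use super::*;

    #[test]
    fn one_pel_threshold() {
        let a = MV { x: 0, y: 0 };
        assert!(!mvdiff4(a, MV { x: 3, y: -3 }));
        assert!(mvdiff4(a, MV { x: 4, y: 0 }));
        assert!(mvdiff4(a, MV { x: 0, y: -4 }));
    }
}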