// nihav-core/src/scale/mod.rs
//! Image conversion functionality.
//!
//! # Examples
//!
//! Convert an input image into a YUV one and scale it down two times.
//! ```no_run
//! use nihav_core::scale::*;
//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
//!
//! let mut in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
//! scaler.convert(&in_pic, &mut out_pic).unwrap();
//! ```
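//!
//! The converter also accepts a list of `(name, value)` string options. As a rough
//! sketch (the `"scaler"` option name and the `"bilin"` mode are the ones exercised
//! by this module's own tests; other kernels may expose different options), scaling
//! with an explicitly chosen algorithm could look like this:
//! ```no_run
//! use nihav_core::scale::*;
//! use nihav_core::formats::RGB24_FORMAT;
//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
//!
//! let in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, RGB24_FORMAT), 4).unwrap();
//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
//! let options = [("scaler".to_string(), "bilin".to_string())];
//! let mut scaler = NAScale::new_with_options(in_fmt, out_fmt, &options).unwrap();
//! scaler.convert(&in_pic, &mut out_pic).unwrap();
//! ```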
use crate::frame::*;

mod kernel;

mod colorcvt;
mod repack;
#[allow(clippy::module_inception)]
mod scale;

mod palette;

pub use crate::scale::palette::{palettise_frame, QuantisationMode, PaletteSearchMode};

/// Image format information used by the converter.
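///
/// A `ScaleInfo` is normally obtained from an allocated picture via
/// [`get_scale_fmt_from_pic`], but since all fields are public it can also be filled
/// in by hand; a minimal sketch (the format constant comes from `nihav_core::formats`):
/// ```no_run
/// use nihav_core::scale::ScaleInfo;
/// use nihav_core::formats::YUV420_FORMAT;
///
/// let info = ScaleInfo { fmt: YUV420_FORMAT, width: 320, height: 240 };
/// println!("{}", info);
/// ```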
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
    /// Pixel format description.
    pub fmt:    NAPixelFormaton,
    /// Image width.
    pub width:  usize,
    /// Image height.
    pub height: usize,
}

impl std::fmt::Display for ScaleInfo {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "({}x{}, {})", self.width, self.height, self.fmt)
    }
}

/// A list specifying general image conversion errors.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum ScaleError {
    /// Input or output buffer contains no image data.
    NoFrame,
    /// Allocation failed.
    AllocError,
    /// Invalid argument.
    InvalidArgument,
    /// Feature is not implemented.
    NotImplemented,
    /// Internal implementation bug.
    Bug,
}

/// A specialised `Result` type for image conversion operations.
pub type ScaleResult<T> = Result<T, ScaleError>;

/*trait Kernel {
    fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType>;
    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType);
}*/

struct KernelDesc {
    name:   &'static str,
    create: fn () -> Box<dyn kernel::Kernel>,
}

impl KernelDesc {
    fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
        for kern in KERNELS.iter() {
            if kern.name == name {
                return Ok((kern.create)());
            }
        }
        Err(ScaleError::InvalidArgument)
    }
}

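// Every pipeline stage looks its worker kernel up in the table below by name (see
// `Stage::new`); asking for a name that is not listed yields
// `ScaleError::InvalidArgument`. For illustration, `KernelDesc::find("scale")`
// returns a fresh scaling kernel created by `scale::create_scale`.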
const KERNELS: &[KernelDesc] = &[
    KernelDesc { name: "pack",       create: repack::create_pack },
    KernelDesc { name: "unpack",     create: repack::create_unpack },
    KernelDesc { name: "depal",      create: repack::create_depal },
    KernelDesc { name: "palette",    create: palette::create_palettise },
    KernelDesc { name: "scale",      create: scale::create_scale },
    KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
    KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
];

struct Stage {
    fmt_out: ScaleInfo,
    tmp_pic: NABufferType,
    next:    Option<Box<Stage>>,
    worker:  Box<dyn kernel::Kernel>,
}

/// Converts input picture information into the format used by the scaler.
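///
/// A minimal usage sketch (the allocation helpers are the same ones used in the
/// module-level example):
/// ```no_run
/// use nihav_core::scale::get_scale_fmt_from_pic;
/// use nihav_core::formats::RGB24_FORMAT;
/// use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
///
/// let pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
/// let fmt = get_scale_fmt_from_pic(&pic);
/// assert_eq!(fmt.width, 640);
/// ```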
pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
    let info = pic.get_video_info().unwrap();
    ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
}

impl Stage {
    fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo, options: &[(String, String)]) -> ScaleResult<Self> {
        let mut worker = KernelDesc::find(name)?;
        let tmp_pic = worker.init(in_fmt, dest_fmt, options)?;
        let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
        Ok(Self { fmt_out, tmp_pic, next: None, worker })
    }
    fn add(&mut self, new: Stage) {
        if let Some(ref mut next) = self.next {
            next.add(new);
        } else {
            self.next = Some(Box::new(new));
        }
    }
    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
        if let Some(ref mut nextstage) = self.next {
            self.worker.process(pic_in, &mut self.tmp_pic);
            nextstage.process(&self.tmp_pic, pic_out)?;
        } else {
            self.worker.process(pic_in, pic_out);
        }
        Ok(())
    }
    fn drop_last_tmp(&mut self) {
        if let Some(ref mut nextstage) = self.next {
            nextstage.drop_last_tmp();
        } else {
            self.tmp_pic = NABufferType::None;
        }
    }
}

/// Image format converter.
pub struct NAScale {
    fmt_in:       ScaleInfo,
    fmt_out:      ScaleInfo,
    just_convert: bool,
    pipeline:     Option<Stage>,
}

fn check_format(in_fmt: NAVideoInfo, ref_fmt: &ScaleInfo, just_convert: bool) -> ScaleResult<()> {
    if in_fmt.get_format() != ref_fmt.fmt { return Err(ScaleError::InvalidArgument); }
    if !just_convert && (in_fmt.get_width() != ref_fmt.width || in_fmt.get_height() != ref_fmt.height) {
        return Err(ScaleError::InvalidArgument);
    }
    Ok(())
}

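// Plain copy between two buffers of the same format, used when no conversion stages
// are needed. Paletted frames have their palette copied along with the image data;
// otherwise each plane is copied line by line whenever strides or offsets differ
// between the source and destination buffers.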
fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
{
    if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
        if sbuf.get_info().get_format().is_paletted() {
            let same = sbuf.get_stride(0) == dbuf.get_stride(0) && sbuf.get_offset(1) == dbuf.get_offset(1);
            if same {
                let src = sbuf.get_data();
                let dst = dbuf.get_data_mut().unwrap();
                dst.copy_from_slice(src);
            } else {
                let (_, h) = sbuf.get_dimensions(0);
                let soff = sbuf.get_offset(0);
                let spoff = sbuf.get_offset(1);
                let sstride = sbuf.get_stride(0);
                let src = sbuf.get_data();
                let doff = dbuf.get_offset(0);
                let dpoff = dbuf.get_offset(1);
                let dstride = dbuf.get_stride(0);
                let dst = dbuf.get_data_mut().unwrap();
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst[doff..].chunks_exact_mut(dstride).take(h).zip(src[soff..].chunks_exact(sstride)) {
                    dline[..copy_size].copy_from_slice(&sline[..copy_size]);
                }
                dst[dpoff..].copy_from_slice(&src[spoff..]);
            }
            return;
        }
        let mut same = true;
        let num_components = sbuf.get_info().get_format().get_num_comp();
        for i in 0..num_components {
            if sbuf.get_stride(i) != dbuf.get_stride(i) {
                same = false;
                break;
            }
            if sbuf.get_offset(i) != dbuf.get_offset(i) {
                same = false;
                break;
            }
        }
        if same {
            let sdata = sbuf.get_data();
            let ddata = dbuf.get_data_mut().unwrap();
            ddata.copy_from_slice(&sdata[0..]);
        } else {
            let sdata = sbuf.get_data();
            for comp in 0..num_components {
                let (_, h) = sbuf.get_dimensions(comp);
                let src = &sdata[sbuf.get_offset(comp)..];
                let sstride = sbuf.get_stride(comp);
                let doff = dbuf.get_offset(comp);
                let dstride = dbuf.get_stride(comp);
                let ddata = dbuf.get_data_mut().unwrap();
                let dst = &mut ddata[doff..];
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
                    dline[..copy_size].copy_from_slice(&sline[..copy_size]);
                }
            }
        }
    } else if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf16(), pic_out.get_vbuf16()) {
        let mut same = true;
        let num_components = sbuf.get_info().get_format().get_num_comp();
        for i in 0..num_components {
            if sbuf.get_stride(i) != dbuf.get_stride(i) {
                same = false;
                break;
            }
            if sbuf.get_offset(i) != dbuf.get_offset(i) {
                same = false;
                break;
            }
        }
        if same {
            let sdata = sbuf.get_data();
            let ddata = dbuf.get_data_mut().unwrap();
            ddata.copy_from_slice(&sdata[0..]);
        } else {
            let sdata = sbuf.get_data();
            for comp in 0..num_components {
                let (_, h) = sbuf.get_dimensions(comp);
                let src = &sdata[sbuf.get_offset(comp)..];
                let sstride = sbuf.get_stride(comp);
                let doff = dbuf.get_offset(comp);
                let dstride = dbuf.get_stride(comp);
                let ddata = dbuf.get_data_mut().unwrap();
                let dst = &mut ddata[doff..];
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
                    dline[..copy_size].copy_from_slice(&sline[..copy_size]);
                }
            }
        }
    } else {
        unimplemented!();
    }
}

macro_rules! add_stage {
    ($head:expr, $new:expr) => {
        if let Some(ref mut h) = $head {
            h.add($new);
        } else {
            $head = Some($new);
        }
    };
}
fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
    if (a.width >= b.width) && (a.height >= b.height) {
        return true;
    }
    if a.fmt.get_max_depth() > b.fmt.get_max_depth() {
        return true;
    }
    if a.fmt.get_max_subsampling() < b.fmt.get_max_subsampling() {
        return true;
    }
    false
}
fn fmt_needs_scale(ifmt: &NAPixelFormaton, ofmt: &NAPixelFormaton) -> bool {
    for (ichr, ochr) in ifmt.comp_info.iter().zip(ofmt.comp_info.iter()) {
        if let (Some(ic), Some(oc)) = (ichr, ochr) {
            if ic.h_ss != oc.h_ss || ic.v_ss != oc.v_ss {
                return true;
            }
        }
    }
    false
}
fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool, options: &[(String, String)]) -> ScaleResult<Option<Stage>> {
    let mut debug = false;
    for (name, value) in options.iter() {
        if name == "debug" && (value.is_empty() || value == "true") {
            debug = true;
            break;
        }
    }

    let inname = ifmt.fmt.get_model().get_short_name();
    let outname = ofmt.fmt.get_model().get_short_name();

    if debug {
        println!("convert {} -> {}", ifmt, ofmt);
    }
    let needs_scale = if fmt_needs_scale(&ifmt.fmt, &ofmt.fmt) {
            true
        } else {
            !just_convert
        };
    let needs_unpack = !ifmt.fmt.is_unpacked();
    let needs_pack = !ofmt.fmt.is_unpacked();
    let needs_convert = inname != outname;
    let scale_before_cvt = is_better_fmt(ifmt, ofmt) && needs_convert
                           && (ofmt.fmt.get_max_subsampling() == 0);
    let needs_palettise = ofmt.fmt.palette;
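    // Illustration (not exhaustive): converting a 640x480 RGB24 picture into a
    // 320x240 YUV420 one sets needs_unpack (RGB24 is packed), needs_convert
    // (different colour models) and needs_scale (different subsampling), while
    // scale_before_cvt stays false because YUV420 is subsampled, so the resulting
    // pipeline is "unpack" -> "rgb_to_yuv" -> "scale".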
//todo stages for model and gamma conversion

    let mut stages: Option<Stage> = None;
    let mut cur_fmt = *ifmt;

    if needs_unpack {
        if debug {
            println!("[adding unpack]");
        }
        let new_stage = if !cur_fmt.fmt.is_paletted() {
                Stage::new("unpack", &cur_fmt, ofmt, options)?
            } else {
                Stage::new("depal", &cur_fmt, ofmt, options)?
            };
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_scale && scale_before_cvt {
        if debug {
            println!("[adding scale]");
        }
        let new_stage = Stage::new("scale", &cur_fmt, ofmt, options)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_convert {
        if debug {
            println!("[adding convert]");
        }
        let cvtname = format!("{}_to_{}", inname, outname);
        if debug {
            println!("[{}]", cvtname);
        }
        let new_stage = Stage::new(&cvtname, &cur_fmt, ofmt, options)?;
//todo if fails try converting via RGB or YUV
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
//todo alpha plane copy/add
    }
    if needs_scale && !scale_before_cvt {
        if debug {
            println!("[adding scale]");
        }
        let new_stage = Stage::new("scale", &cur_fmt, ofmt, options)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_pack && !needs_palettise {
        if debug {
            println!("[adding pack]");
        }
        let new_stage = Stage::new("pack", &cur_fmt, ofmt, options)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_palettise {
        if debug {
            println!("[adding palettise]");
        }
        let new_stage = Stage::new("palette", &cur_fmt, ofmt, options)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }

    if let Some(ref mut head) = stages {
        head.drop_last_tmp();
    }

    Ok(stages)
}

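// Swaps the rows of a single plane around its horizontal centre using two scratch
// line buffers, i.e. flips the plane vertically in place.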
fn swap_plane<T:Copy>(data: &mut [T], stride: usize, h: usize, line0: &mut [T], line1: &mut [T]) {
    let mut doff0 = 0;
    let mut doff1 = stride * (h - 1);
    for _ in 0..h/2 {
        line0.copy_from_slice(&data[doff0..][..stride]);
        line1.copy_from_slice(&data[doff1..][..stride]);
        data[doff1..][..stride].copy_from_slice(line0);
        data[doff0..][..stride].copy_from_slice(line1);
        doff0 += stride;
        doff1 -= stride;
    }
}

/// Flips the picture contents vertically.
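///
/// A minimal sketch of flipping a freshly allocated picture in place (the format
/// constant and allocation helper are the same ones used in the module-level example):
/// ```no_run
/// use nihav_core::scale::flip_picture;
/// use nihav_core::formats::RGB24_FORMAT;
/// use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
///
/// let mut pic = alloc_video_buffer(NAVideoInfo::new(16, 16, false, RGB24_FORMAT), 4).unwrap();
/// flip_picture(&mut pic).unwrap();
/// ```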
pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
    match pic {
        NABufferType::Video(ref mut vb) => {
            let ncomp = vb.get_num_components();
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        NABufferType::Video16(ref mut vb) => {
            let ncomp = vb.get_num_components().max(1);
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        NABufferType::Video32(ref mut vb) => {
            let ncomp = vb.get_num_components().max(1);
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        NABufferType::VideoPacked(ref mut vb) => {
            let ncomp = vb.get_num_components();
            for comp in 0..ncomp {
                let off = vb.get_offset(comp);
                let stride = vb.get_stride(comp);
                let (_, h) = vb.get_dimensions(comp);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
            if ncomp == 0 && vb.get_stride(0) != 0 {
                let off = vb.get_offset(0);
                let stride = vb.get_stride(0);
                let (_, h) = vb.get_dimensions(0);
                let data = vb.get_data_mut().unwrap();
                let mut line0 = vec![0; stride];
                let mut line1 = vec![0; stride];
                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
            }
        },
        _ => { return Err(ScaleError::InvalidArgument); },
    };
    Ok(())
}

impl NAScale {
    /// Constructs a new `NAScale` instance.
    pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
        let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
        let pipeline = if fmt_in != fmt_out {
                build_pipeline(&fmt_in, &fmt_out, just_convert, &[])?
            } else {
                None
            };
        Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
    }
    /// Constructs a new `NAScale` instance taking the provided options into account.
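    ///
    /// Options are passed as `(name, value)` string pairs. Two names that appear in
    /// this module and its tests are `"debug"` (an empty value or `"true"` makes the
    /// converter print the pipeline it builds) and `"scaler"` (forwarded to the
    /// scaling kernel to select the algorithm, e.g. `"bilin"` or `"bicubic"`);
    /// individual kernels may accept further options of their own.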
    pub fn new_with_options(fmt_in: ScaleInfo, fmt_out: ScaleInfo, options: &[(String, String)]) -> ScaleResult<Self> {
        let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
        let pipeline = if fmt_in != fmt_out {
                build_pipeline(&fmt_in, &fmt_out, just_convert, options)?
            } else {
                None
            };
        Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
    }
    /// Checks whether the requested conversion is needed at all.
    pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
    /// Returns the input image format.
    pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
    /// Returns the output image format.
    pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
    /// Performs the image format conversion.
    pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
        let in_info = pic_in.get_video_info();
        let out_info = pic_out.get_video_info();
        if in_info.is_none() || out_info.is_none() { return Err(ScaleError::InvalidArgument); }
        let in_info = in_info.unwrap();
        let out_info = out_info.unwrap();
        if self.just_convert &&
            (in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
            return Err(ScaleError::InvalidArgument);
        }
        // flip at the end if exactly one of the pictures is stored upside down
        let needs_flip = in_info.is_flipped() ^ out_info.is_flipped();
        check_format(in_info, &self.fmt_in, self.just_convert)?;
        check_format(out_info, &self.fmt_out, self.just_convert)?;
        let ret = if let Some(ref mut pipe) = self.pipeline {
                pipe.process(pic_in, pic_out)
            } else {
                // same format and size, so a plain copy is enough
                copy(pic_in, pic_out);
                Ok(())
            };
        if ret.is_ok() && needs_flip {
            flip_picture(pic_out)?;
        }
        ret
    }
}

#[cfg(test)]
mod test {
    use super::*;

    fn fill_pic(pic: &mut NABufferType, val: u8) {
        if let Some(ref mut buf) = pic.get_vbuf() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val; }
        } else if let Some(ref mut buf) = pic.get_vbuf16() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val as u16; }
        } else if let Some(ref mut buf) = pic.get_vbuf32() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = (val as u32) * 0x01010101; }
        }
    }
    #[test]
    fn test_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 0x0);
        assert_eq!(odata[1], 0x4);
        assert_eq!(odata[2], 0x52);

        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 42);
        assert!(((odata[uoff] ^ 0x80) as i8).abs() <= 1);
        assert!(((odata[voff] ^ 0x80) as i8).abs() <= 1);
        let mut scaler = NAScale::new(ofmt, ifmt).unwrap();
        scaler.convert(&out_pic, &mut in_pic).unwrap();
        let obuf = in_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    #[test]
    fn test_scale() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf16().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    #[test]
    fn test_scale_and_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 11);
        assert_eq!(odata[uoff], 162);
        assert_eq!(odata[voff], 118);
    }
    #[test]
    fn test_scale_and_convert_to_pal() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 142);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, PAL8_FORMAT), 0).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let dataoff = obuf.get_offset(0);
        let paloff = obuf.get_offset(1);
        let odata = obuf.get_data();
        assert_eq!(odata[dataoff], 0);
        assert_eq!(odata[paloff], 157);
        assert_eq!(odata[paloff + 1], 129);
        assert_eq!(odata[paloff + 2], 170);
    }
    #[test]
    fn test_scale_modes() {
        const IN_DATA: [[u8; 6]; 2] = [
            [0xFF, 0xC0, 0x40, 0x00, 0x40, 0xC0],
            [0x00, 0x40, 0xC0, 0xFF, 0xC0, 0x40]
        ];
        const TEST_DATA: &[(&str, [[u8; 9]; 3])] = &[
            ("nn",
             [[0xFF, 0xC0, 0x40, 0xFF, 0xC0, 0x40, 0x00, 0x40, 0xC0],
              [0xFF, 0xC0, 0x40, 0xFF, 0xC0, 0x40, 0x00, 0x40, 0xC0],
              [0x00, 0x40, 0xC0, 0x00, 0x40, 0xC0, 0xFF, 0xC0, 0x40]]),
            ("bilin",
             [[0xFF, 0xC0, 0x40, 0x55, 0x6A, 0x95, 0x00, 0x40, 0xC0],
              [0x55, 0x6A, 0x95, 0x8D, 0x86, 0x78, 0xAA, 0x95, 0x6A],
              [0x00, 0x40, 0xC0, 0xAA, 0x95, 0x6A, 0xFF, 0xC0, 0x40]]),
            ("bicubic",
             [[0xFF, 0xC0, 0x40, 0x4B, 0x65, 0x9A, 0x00, 0x36, 0xC9],
              [0x4B, 0x65, 0x9A, 0x94, 0x8A, 0x74, 0xB3, 0x9D, 0x61],
              [0x00, 0x36, 0xC9, 0xBA, 0x9D, 0x61, 0xFF, 0xD3, 0x2B]]),
            ("lanczos",
             [[0xFF, 0xC0, 0x40, 0x4C, 0x66, 0x98, 0x00, 0x31, 0xCD],
              [0x4C, 0x66, 0x98, 0x91, 0x88, 0x74, 0xB1, 0x9D, 0x5F],
              [0x00, 0x31, 0xCD, 0xBB, 0x9D, 0x5F, 0xFF, 0xDD, 0x1E]]),
            ("lanczos2",
             [[0xFF, 0xC0, 0x40, 0x4F, 0x68, 0x9B, 0x00, 0x35, 0xCD],
              [0x4F, 0x68, 0x9B, 0x96, 0x8D, 0x79, 0xB3, 0xA0, 0x64],
              [0x00, 0x35, 0xCD, 0xBE, 0xA1, 0x65, 0xFF, 0xDC, 0x28]]),
        ];

        let in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB24_FORMAT), 3).unwrap();
        if let Some(ref mut vbuf) = in_pic.get_vbuf() {
            let stride = vbuf.get_stride(0);
            let data = vbuf.get_data_mut().unwrap();
            for (dline, rline) in data.chunks_mut(stride).zip(IN_DATA.iter()) {
                dline[..6].copy_from_slice(rline);
            }
        } else {
            panic!("wrong format");
        }
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB24_FORMAT), 3).unwrap();
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        for (method, ref_data) in TEST_DATA.iter() {
            fill_pic(&mut out_pic, 0);
            let mut scaler = NAScale::new_with_options(ifmt, ofmt, &[("scaler".to_string(), method.to_string())]).unwrap();
            scaler.convert(&in_pic, &mut out_pic).unwrap();
            let obuf = out_pic.get_vbuf().unwrap();
            let ostride = obuf.get_stride(0);
            let odata = obuf.get_data();
            for (oline, rline) in odata.chunks(ostride).zip(ref_data.iter()) {
                for (&a, &b) in oline.iter().zip(rline.iter()) {
                    assert_eq!(a, b);
                }
            }
        }
    }
}