siren: correct scaling to match reference output
[nihav.git] / nihav-core / src / scale / mod.rs
CommitLineData
30d57e4a
KS
1//! Image conversion functionality.
2
3//! # Examples
4//!
5//! Convert input image into YUV one and scale down two times.
6//! ```no_run
7//! use nihav_core::scale::*;
8//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
9//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
10//!
11//! let mut in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
12//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
13//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
14//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
15//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
16//! scaler.convert(&in_pic, &mut out_pic).unwrap();
17//! ```
03accf76
KS
18use crate::frame::*;
19
20mod kernel;
21
22mod colorcvt;
23mod repack;
24mod scale;
25
/// Image format information used by the converter.
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
    /// Pixel format description.
    pub fmt: NAPixelFormaton,
    /// Image width in pixels.
    pub width: usize,
    /// Image height in pixels.
    pub height: usize,
}
36
37impl std::fmt::Display for ScaleInfo {
38 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
39 write!(f, "({}x{}, {})", self.width, self.height, self.fmt)
40 }
41}
42
/// A list specifying general image conversion errors.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum ScaleError {
    /// Input or output buffer contains no image data.
    NoFrame,
    /// Buffer allocation failed.
    AllocError,
    /// An invalid argument was supplied (e.g. mismatched format or size).
    InvalidArgument,
    /// The requested feature is not implemented.
    NotImplemented,
    /// Internal implementation bug.
    Bug,
}
58
/// A specialised `Result` type for image conversion operations,
/// carrying [`ScaleError`] on failure.
pub type ScaleResult<T> = Result<T, ScaleError>;
61
62/*trait Kernel {
63 fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType>;
64 fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType);
65}*/
66
/// Descriptor for a conversion kernel: a name used for lookup plus a
/// constructor producing a fresh boxed kernel instance.
struct KernelDesc {
    // Lookup key, e.g. "scale" or "rgb_to_yuv".
    name: &'static str,
    // Factory function creating the kernel.
    create: fn () -> Box<dyn kernel::Kernel>,
}
71
72impl KernelDesc {
6011e201 73 fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
03accf76
KS
74 for kern in KERNELS.iter() {
75 if kern.name == name {
76 return Ok((kern.create)());
77 }
78 }
79 Err(ScaleError::InvalidArgument)
80 }
81}
82
/// Registry of all conversion kernels known to the scaler,
/// searched by name via [`KernelDesc::find`].
const KERNELS: &[KernelDesc] = &[
    KernelDesc { name: "pack",       create: repack::create_pack },
    KernelDesc { name: "unpack",     create: repack::create_unpack },
    KernelDesc { name: "depal",      create: repack::create_depal },
    KernelDesc { name: "scale",      create: scale::create_scale },
    KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
    KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
];
91
/// One link of the conversion pipeline chain.
struct Stage {
    // Format this stage outputs (as reported by the kernel's temp buffer).
    fmt_out: ScaleInfo,
    // Intermediate output buffer; set to `NABufferType::None` for the last
    // stage, which writes directly into the caller-provided picture.
    tmp_pic: NABufferType,
    // Next stage in the chain, if any.
    next: Option<Box<Stage>>,
    // Kernel performing the actual conversion work.
    worker: Box<dyn kernel::Kernel>,
}
98
/// Converts input picture information into format used by scaler.
///
/// # Panics
///
/// Panics if `pic` contains no video data (i.e. `get_video_info()` returns `None`).
pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
    let info = pic.get_video_info().unwrap();
    ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
}
104
105impl Stage {
106 fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<Self> {
107 let mut worker = KernelDesc::find(name)?;
108 let tmp_pic = worker.init(in_fmt, dest_fmt)?;
109 let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
110 Ok(Self { fmt_out, tmp_pic, next: None, worker })
111 }
112 fn add(&mut self, new: Stage) {
113 if let Some(ref mut next) = self.next {
114 next.add(new);
115 } else {
116 self.next = Some(Box::new(new));
117 }
118 }
119 fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
120 if let Some(ref mut nextstage) = self.next {
121 self.worker.process(pic_in, &mut self.tmp_pic);
122 nextstage.process(&self.tmp_pic, pic_out)?;
123 } else {
124 self.worker.process(pic_in, pic_out);
125 }
126 Ok(())
127 }
128 fn drop_last_tmp(&mut self) {
129 if let Some(ref mut nextstage) = self.next {
130 nextstage.drop_last_tmp();
131 } else {
132 self.tmp_pic = NABufferType::None;
133 }
134 }
135}
136
/// Image format converter.
pub struct NAScale {
    // Negotiated input format.
    fmt_in: ScaleInfo,
    // Negotiated output format.
    fmt_out: ScaleInfo,
    // True when input and output dimensions match, i.e. no scaling needed.
    just_convert: bool,
    // Chain of conversion stages; `None` when input equals output and a
    // plain copy suffices.
    pipeline: Option<Stage>,
}
144
145fn check_format(in_fmt: NAVideoInfo, ref_fmt: &ScaleInfo, just_convert: bool) -> ScaleResult<()> {
146 if in_fmt.get_format() != ref_fmt.fmt { return Err(ScaleError::InvalidArgument); }
147 if !just_convert && (in_fmt.get_width() != ref_fmt.width || in_fmt.get_height() != ref_fmt.height) {
148 return Err(ScaleError::InvalidArgument);
149 }
150 Ok(())
151}
152
// Copies picture contents verbatim when no conversion is required.
// If both buffers share the same layout (strides and plane offsets) the
// whole data slice is copied at once; otherwise each plane is copied
// line by line, trimming to the narrower stride.
fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
{
    if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
        // Determine whether source and destination layouts are identical.
        let mut same = true;
        let num_components = sbuf.get_info().get_format().get_num_comp();
        for i in 0..num_components {
            if sbuf.get_stride(i) != dbuf.get_stride(i) {
                same = false;
                break;
            }
            if sbuf.get_offset(i) != dbuf.get_offset(i) {
                same = false;
                break;
            }
        }
        if same {
            // Identical layout: one bulk copy covers every plane.
            let sdata = sbuf.get_data();
            let ddata = dbuf.get_data_mut().unwrap();
            ddata.copy_from_slice(&sdata[0..]);
        } else {
            // Different layout: copy each plane row by row.
            let sdata = sbuf.get_data();
            for comp in 0..num_components {
                let (_, h) = sbuf.get_dimensions(comp);
                let src = &sdata[sbuf.get_offset(comp)..];
                let sstride = sbuf.get_stride(comp);
                let doff = dbuf.get_offset(comp);
                let dstride = dbuf.get_stride(comp);
                let ddata = dbuf.get_data_mut().unwrap();
                let dst = &mut ddata[doff..];
                // Only the common part of each row can be transferred.
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
                    (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
                }
            }
        }
    } else {
        // NOTE(review): only 8-bit video buffers are handled here; other
        // buffer flavours currently abort.
        unimplemented!();
    }
}
192
// Appends `$new` to the pipeline rooted at `$head`, creating the head
// when the pipeline is still empty.
macro_rules! add_stage {
    ($head:expr, $new:expr) => {
        if let Some(ref mut h) = $head {
            h.add($new);
        } else {
            $head = Some($new);
        }
    };
}
202fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
203 if (a.width >= b.width) && (a.height >= b.height) {
204 return true;
205 }
206 if a.fmt.get_max_depth() > b.fmt.get_max_depth() {
207 return true;
208 }
209 if a.fmt.get_max_subsampling() < b.fmt.get_max_subsampling() {
210 return true;
211 }
212 false
213}
// Builds the conversion pipeline as an ordered chain of stages:
// optional unpack/palette expansion, scaling, colourspace conversion and
// final packing. Returns `Ok(None)` when no stage at all is required.
fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool) -> ScaleResult<Option<Stage>> {
    // Colour model short names (e.g. RGB vs YUV) decide whether a
    // colourspace conversion stage is needed.
    let inname = ifmt.fmt.get_model().get_short_name();
    let outname = ofmt.fmt.get_model().get_short_name();

println!("convert {} -> {}", ifmt, ofmt);
    // Scaling is also required for same-size conversion when the chroma
    // subsampling differs between the two formats.
    let needs_scale = if (ofmt.fmt.get_max_subsampling() > 0) &&
        (ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
            true
        } else {
            !just_convert
        };
    let needs_unpack = !ifmt.fmt.is_unpacked();
    let needs_pack = !ofmt.fmt.is_unpacked();
    let needs_convert = inname != outname;
    // Scale before the colourspace conversion when the input carries more
    // information and the output has no subsampling to resample afterwards.
    let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
        && (ofmt.fmt.get_max_subsampling() == 0);
//todo stages for model and gamma conversion

    let mut stages: Option<Stage> = None;
    // Format produced by the stage added last; each new stage consumes it.
    let mut cur_fmt = *ifmt;

    if needs_unpack {
println!("[adding unpack]");
        // Paletted input needs palette expansion instead of plain unpacking.
        let new_stage = if !cur_fmt.fmt.is_paletted() {
                Stage::new("unpack", &cur_fmt, &ofmt)?
            } else {
                Stage::new("depal", &cur_fmt, &ofmt)?
            };
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_scale && scale_before_cvt {
println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_convert {
println!("[adding convert]");
        // Kernel names follow the "<src>_to_<dst>" convention, cf. KERNELS.
        let cvtname = format!("{}_to_{}", inname, outname);
println!("[{}]", cvtname);
        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt)?;
//todo if fails try converting via RGB or YUV
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
//todo alpha plane copy/add
    }
    if needs_scale && !scale_before_cvt {
println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_pack {
println!("[adding pack]");
        let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }

    // The last stage outputs directly into the destination picture, so its
    // preallocated temporary buffer can be released.
    if let Some(ref mut head) = stages {
        head.drop_last_tmp();
    }

    Ok(stages)
}
280
/// Vertically flips a single plane in place by swapping row pairs from the
/// outside in.
///
/// `line0` and `line1` are caller-provided scratch buffers, each exactly
/// `stride` elements long.
///
/// # Panics
///
/// Panics if the scratch buffers are not `stride` elements long or if
/// `data` holds fewer than `stride * h` elements.
fn swap_plane<T:Copy>(data: &mut [T], stride: usize, h: usize, line0: &mut [T], line1: &mut [T]) {
    // Nothing to flip for 0- or 1-row planes; this also guards the
    // `h - 1` subtraction below against underflow when h == 0.
    if h < 2 {
        return;
    }
    let mut doff0 = 0;
    let mut doff1 = stride * (h - 1);
    // Swap top and bottom rows, walking towards the middle; for odd
    // heights the middle row stays untouched.
    for _ in 0..h/2 {
        line0.copy_from_slice(&data[doff0..][..stride]);
        line1.copy_from_slice(&data[doff1..][..stride]);
        (&mut data[doff1..][..stride]).copy_from_slice(line0);
        (&mut data[doff0..][..stride]).copy_from_slice(line1);
        doff0 += stride;
        doff1 -= stride;
    }
}
293
30d57e4a 294/// Flips the picture contents.
085742a3
KS
295pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
296 match pic {
297 NABufferType::Video(ref mut vb) => {
298 let ncomp = vb.get_num_components();
299 for comp in 0..ncomp {
300 let off = vb.get_offset(comp);
301 let stride = vb.get_stride(comp);
302 let (_, h) = vb.get_dimensions(comp);
303 let data = vb.get_data_mut().unwrap();
304 let mut line0 = vec![0; stride];
305 let mut line1 = vec![0; stride];
306 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
307 }
308 },
309 NABufferType::Video16(ref mut vb) => {
310 let ncomp = vb.get_num_components();
311 for comp in 0..ncomp {
312 let off = vb.get_offset(comp);
313 let stride = vb.get_stride(comp);
314 let (_, h) = vb.get_dimensions(comp);
315 let data = vb.get_data_mut().unwrap();
316 let mut line0 = vec![0; stride];
317 let mut line1 = vec![0; stride];
318 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
319 }
320 },
321 NABufferType::Video32(ref mut vb) => {
322 let ncomp = vb.get_num_components();
323 for comp in 0..ncomp {
324 let off = vb.get_offset(comp);
325 let stride = vb.get_stride(comp);
326 let (_, h) = vb.get_dimensions(comp);
327 let data = vb.get_data_mut().unwrap();
328 let mut line0 = vec![0; stride];
329 let mut line1 = vec![0; stride];
330 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
331 }
332 },
333 NABufferType::VideoPacked(ref mut vb) => {
334 let ncomp = vb.get_num_components();
335 for comp in 0..ncomp {
336 let off = vb.get_offset(comp);
337 let stride = vb.get_stride(comp);
338 let (_, h) = vb.get_dimensions(comp);
339 let data = vb.get_data_mut().unwrap();
340 let mut line0 = vec![0; stride];
341 let mut line1 = vec![0; stride];
342 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
343 }
344 },
345 _ => { return Err(ScaleError::InvalidArgument); },
346 };
347 Ok(())
348}
349
03accf76 350impl NAScale {
30d57e4a 351 /// Constructs a new `NAScale` instance.
03accf76
KS
352 pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
353 let pipeline;
354 let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
355 if fmt_in != fmt_out {
356 pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert)?;
357 } else {
358 pipeline = None;
359 }
360 Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
361 }
30d57e4a 362 /// Checks whether requested conversion operation is needed at all.
03accf76 363 pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
30d57e4a 364 /// Returns the input image format.
03accf76 365 pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
30d57e4a 366 /// Returns the output image format.
03accf76 367 pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
30d57e4a 368 /// Performs the image format conversion.
03accf76
KS
369 pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
370 let in_info = pic_in.get_video_info();
371 let out_info = pic_out.get_video_info();
372 if in_info.is_none() || out_info.is_none() { return Err(ScaleError::InvalidArgument); }
373 let in_info = in_info.unwrap();
374 let out_info = out_info.unwrap();
375 if self.just_convert &&
376 (in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
377 return Err(ScaleError::InvalidArgument);
378 }
085742a3 379 let needs_flip = in_info.is_flipped() ^ out_info.is_flipped();
03accf76
KS
380 check_format(in_info, &self.fmt_in, self.just_convert)?;
381 check_format(out_info, &self.fmt_out, self.just_convert)?;
085742a3
KS
382 let ret = if let Some(ref mut pipe) = self.pipeline {
383 pipe.process(pic_in, pic_out)
384 } else {
385 copy(pic_in, pic_out);
386 Ok(())
387 };
388 if ret.is_ok() && needs_flip {
389 flip_picture(pic_out)?;
03accf76 390 }
085742a3 391 ret
03accf76
KS
392 }
393}
394
#[cfg(test)]
mod test {
    use super::*;

    // Fills every sample of the picture with `val`, regardless of the
    // underlying element width (8, 16 or 32 bits per element).
    fn fill_pic(pic: &mut NABufferType, val: u8) {
        if let Some(ref mut buf) = pic.get_vbuf() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val; }
        } else if let Some(ref mut buf) = pic.get_vbuf16() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val as u16; }
        } else if let Some(ref mut buf) = pic.get_vbuf32() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = (val as u32) * 0x01010101; }
        }
    }
    // Pure format conversions without scaling: RGB565 -> RGB24, then a
    // RGB24 -> YUV420 -> RGB24 round trip on a flat-colour picture.
    #[test]
    fn test_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        // 42 = 0b00000_000001_01010 in RGB565 expanded to 8-bit components
        assert_eq!(odata[0], 0x0);
        assert_eq!(odata[1], 0x4);
        assert_eq!(odata[2], 0x52);

        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        // grey input: luma equals the input value, chroma stays neutral
        // (allow rounding error of one step around 0x80)
        assert_eq!(odata[yoff], 42);
        assert!(((odata[uoff] ^ 0x80) as i8).abs() <= 1);
        assert!(((odata[voff] ^ 0x80) as i8).abs() <= 1);
        let mut scaler = NAScale::new(ofmt, ifmt).unwrap();
        scaler.convert(&out_pic, &mut in_pic).unwrap();
        let obuf = in_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        // round trip must restore the original grey value
        assert_eq!(odata[0], 42);
    }
    // Pure upscaling (2x2 -> 3x3) in the same format keeps the flat colour.
    #[test]
    fn test_scale() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf16().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    // Combined scaling and conversion: RGB565 7x3 -> YUV420 4x4; expected
    // values are the reference output for the flat 42 input.
    #[test]
    fn test_scale_and_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 28);
        assert_eq!(odata[uoff], 154);
        assert_eq!(odata[voff], 103);
    }
}