cinepakenc: ask for non-flipped image during negotiation
[nihav.git] / nihav-core / src / scale / mod.rs
CommitLineData
30d57e4a
KS
1//! Image conversion functionality.
2
3//! # Examples
4//!
5//! Convert input image into YUV one and scale down two times.
6//! ```no_run
7//! use nihav_core::scale::*;
8//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
9//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
10//!
11//! let mut in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
12//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
13//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
14//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
15//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
16//! scaler.convert(&in_pic, &mut out_pic).unwrap();
17//! ```
03accf76
KS
18use crate::frame::*;
19
20mod kernel;
21
22mod colorcvt;
23mod repack;
b36f412c 24#[allow(clippy::module_inception)]
03accf76
KS
25mod scale;
26
4b459d0b
KS
27mod palette;
28
29pub use crate::scale::palette::{palettise_frame, QuantisationMode, PaletteSearchMode};
30
/// Image format information used by the converter.
///
/// Bundles a pixel format description with the picture dimensions
/// so a single value fully describes one end of a conversion.
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
    /// Pixel format description.
    pub fmt: NAPixelFormaton,
    /// Image width.
    pub width: usize,
    /// Image height.
    pub height: usize,
}
41
42impl std::fmt::Display for ScaleInfo {
43 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
44 write!(f, "({}x{}, {})", self.width, self.height, self.fmt)
45 }
46}
47
/// A list specifying general image conversion errors.
// `Eq` is derived alongside `PartialEq`: equality on a fieldless enum is
// total, and `Eq` lets the error be used in hash-based containers.
#[derive(Debug,Clone,Copy,PartialEq,Eq)]
#[allow(dead_code)]
pub enum ScaleError {
    /// Input or output buffer contains no image data.
    NoFrame,
    /// Allocation failed.
    AllocError,
    /// Invalid argument.
    InvalidArgument,
    /// Feature is not implemented.
    NotImplemented,
    /// Internal implementation bug.
    Bug,
}
63
/// A specialised `Result` type for image conversion operations.
pub type ScaleResult<T> = Result<T, ScaleError>;
66
67/*trait Kernel {
68 fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType>;
69 fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType);
70}*/
71
/// Description of a conversion kernel: a lookup name plus a factory function.
struct KernelDesc {
    /// Name the kernel is registered under (see `KERNELS`).
    name: &'static str,
    /// Factory producing a fresh kernel instance.
    create: fn () -> Box<dyn kernel::Kernel>,
}
76
77impl KernelDesc {
6011e201 78 fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
03accf76
KS
79 for kern in KERNELS.iter() {
80 if kern.name == name {
81 return Ok((kern.create)());
82 }
83 }
84 Err(ScaleError::InvalidArgument)
85 }
86}
87
/// Registry of all conversion kernels known to the pipeline builder.
const KERNELS: &[KernelDesc] = &[
    KernelDesc { name: "pack", create: repack::create_pack },
    KernelDesc { name: "unpack", create: repack::create_unpack },
    KernelDesc { name: "depal", create: repack::create_depal },
    KernelDesc { name: "palette", create: palette::create_palettise },
    KernelDesc { name: "scale", create: scale::create_scale },
    KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
    KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
];
97
/// One stage of the conversion pipeline (a singly-linked list of kernels).
struct Stage {
    /// Format this stage produces.
    fmt_out: ScaleInfo,
    /// Scratch buffer receiving this stage's output
    /// (released for the final stage, which writes directly to the output).
    tmp_pic: NABufferType,
    /// Next stage in the chain, if any.
    next: Option<Box<Stage>>,
    /// Kernel performing the actual work.
    worker: Box<dyn kernel::Kernel>,
}
104
30d57e4a 105/// Converts input picture information into format used by scaler.
03accf76
KS
106pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
107 let info = pic.get_video_info().unwrap();
108 ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
109}
110
111impl Stage {
112 fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<Self> {
113 let mut worker = KernelDesc::find(name)?;
114 let tmp_pic = worker.init(in_fmt, dest_fmt)?;
115 let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
116 Ok(Self { fmt_out, tmp_pic, next: None, worker })
117 }
118 fn add(&mut self, new: Stage) {
119 if let Some(ref mut next) = self.next {
120 next.add(new);
121 } else {
122 self.next = Some(Box::new(new));
123 }
124 }
125 fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
126 if let Some(ref mut nextstage) = self.next {
127 self.worker.process(pic_in, &mut self.tmp_pic);
128 nextstage.process(&self.tmp_pic, pic_out)?;
129 } else {
130 self.worker.process(pic_in, pic_out);
131 }
132 Ok(())
133 }
134 fn drop_last_tmp(&mut self) {
135 if let Some(ref mut nextstage) = self.next {
136 nextstage.drop_last_tmp();
137 } else {
138 self.tmp_pic = NABufferType::None;
139 }
140 }
141}
142
/// Image format converter.
pub struct NAScale {
    /// Input format the converter was created for.
    fmt_in: ScaleInfo,
    /// Output format the converter was created for.
    fmt_out: ScaleInfo,
    /// Set when input and output dimensions match (format conversion only).
    just_convert: bool,
    /// Head of the kernel pipeline; `None` when a plain copy suffices.
    pipeline: Option<Stage>,
}
150
151fn check_format(in_fmt: NAVideoInfo, ref_fmt: &ScaleInfo, just_convert: bool) -> ScaleResult<()> {
152 if in_fmt.get_format() != ref_fmt.fmt { return Err(ScaleError::InvalidArgument); }
153 if !just_convert && (in_fmt.get_width() != ref_fmt.width || in_fmt.get_height() != ref_fmt.height) {
154 return Err(ScaleError::InvalidArgument);
155 }
156 Ok(())
157}
158
/// Copies picture contents between two buffers of the same format.
///
/// When the buffers share identical layout (matching strides and plane
/// offsets) the whole data slice is copied in one go, otherwise each
/// plane is copied line by line.
///
/// NOTE(review): only 8-bit (`get_vbuf`) buffers are handled; other
/// buffer kinds hit `unimplemented!()` — confirm callers never pass them.
fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
{
    if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
        // Determine whether source and destination layouts are identical.
        let mut same = true;
        let num_components = sbuf.get_info().get_format().get_num_comp();
        for i in 0..num_components {
            if sbuf.get_stride(i) != dbuf.get_stride(i) {
                same = false;
                break;
            }
            if sbuf.get_offset(i) != dbuf.get_offset(i) {
                same = false;
                break;
            }
        }
        if same {
            // Fast path: one bulk copy covers every plane.
            let sdata = sbuf.get_data();
            let ddata = dbuf.get_data_mut().unwrap();
            ddata.copy_from_slice(&sdata[0..]);
        } else {
            // Slow path: strides or offsets differ, copy plane by plane.
            let sdata = sbuf.get_data();
            for comp in 0..num_components {
                let (_, h) = sbuf.get_dimensions(comp);
                let src = &sdata[sbuf.get_offset(comp)..];
                let sstride = sbuf.get_stride(comp);
                let doff = dbuf.get_offset(comp);
                let dstride = dbuf.get_stride(comp);
                let ddata = dbuf.get_data_mut().unwrap();
                let dst = &mut ddata[doff..];
                // Never copy more than either stride can hold.
                let copy_size = sstride.min(dstride);
                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
                    (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
                }
            }
        }
    } else {
        unimplemented!();
    }
}
198
/// Appends a stage to the pipeline, creating the head when it is still empty.
macro_rules! add_stage {
    ($head:expr, $new:expr) => {
        if let Some(ref mut h) = $head {
            h.add($new);
        } else {
            $head = Some($new);
        }
    };
}
208fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
209 if (a.width >= b.width) && (a.height >= b.height) {
210 return true;
211 }
212 if a.fmt.get_max_depth() > b.fmt.get_max_depth() {
213 return true;
214 }
215 if a.fmt.get_max_subsampling() < b.fmt.get_max_subsampling() {
216 return true;
217 }
218 false
219}
/// Builds the chain of conversion stages taking `ifmt` input to `ofmt` output.
///
/// Stages are added in the order unpack/depalettise -> scale -> colourspace
/// conversion -> pack/palettise, except that scaling happens before the
/// colourspace conversion when the input format is "better" (see
/// `is_better_fmt()`) and the output has no subsampling.
/// Returns `None` when no stage at all is required.
fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool) -> ScaleResult<Option<Stage>> {
    let inname = ifmt.fmt.get_model().get_short_name();
    let outname = ofmt.fmt.get_model().get_short_name();

println!("convert {} -> {}", ifmt, ofmt);
    // Scaling is needed when the sizes differ or when the output subsampling
    // differs from the input one.
    let needs_scale = if (ofmt.fmt.get_max_subsampling() > 0) &&
        (ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
            true
        } else {
            !just_convert
        };
    let needs_unpack = !ifmt.fmt.is_unpacked();
    let needs_pack = !ofmt.fmt.is_unpacked();
    // Colourspace conversion is needed when the colour models differ.
    let needs_convert = inname != outname;
    let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
                           && (ofmt.fmt.get_max_subsampling() == 0);
    let needs_palettise = ofmt.fmt.palette;
//todo stages for model and gamma conversion

    let mut stages: Option<Stage> = None;
    // Tracks the format produced so far as stages are appended.
    let mut cur_fmt = *ifmt;

    if needs_unpack {
println!("[adding unpack]");
        let new_stage = if !cur_fmt.fmt.is_paletted() {
                Stage::new("unpack", &cur_fmt, &ofmt)?
            } else {
                Stage::new("depal", &cur_fmt, &ofmt)?
            };
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_scale && scale_before_cvt {
println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_convert {
println!("[adding convert]");
        // Kernel names follow the "<in>_to_<out>" convention (see KERNELS).
        let cvtname = format!("{}_to_{}", inname, outname);
println!("[{}]", cvtname);
        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt)?;
//todo if fails try converting via RGB or YUV
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
//todo alpha plane copy/add
    }
    if needs_scale && !scale_before_cvt {
println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_pack && !needs_palettise {
println!("[adding pack]");
        let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_palettise {
println!("[adding palettise]");
        let new_stage = Stage::new("palette", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }

    // The final stage writes directly into the output picture,
    // so its scratch buffer is never used and can be released.
    if let Some(ref mut head) = stages {
        head.drop_last_tmp();
    }

    Ok(stages)
}
293
085742a3
KS
/// Flips one plane upside down by swapping lines in place.
///
/// `data` starts at the plane's top-left corner, `stride` is the line
/// length, `h` the number of lines; `line0`/`line1` are caller-provided
/// scratch buffers of `stride` elements each.
fn swap_plane<T:Copy>(data: &mut [T], stride: usize, h: usize, line0: &mut [T], line1: &mut [T]) {
    // Nothing to flip for zero- or one-line planes; the early return also
    // avoids the `stride * (h - 1)` underflow when `h == 0`.
    if h < 2 {
        return;
    }
    let mut doff0 = 0;
    let mut doff1 = stride * (h - 1);
    for _ in 0..h/2 {
        line0.copy_from_slice(&data[doff0..][..stride]);
        line1.copy_from_slice(&data[doff1..][..stride]);
        data[doff1..][..stride].copy_from_slice(line0);
        data[doff0..][..stride].copy_from_slice(line1);
        doff0 += stride;
        doff1 -= stride;
    }
}
306
30d57e4a 307/// Flips the picture contents.
085742a3
KS
308pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
309 match pic {
310 NABufferType::Video(ref mut vb) => {
311 let ncomp = vb.get_num_components();
312 for comp in 0..ncomp {
313 let off = vb.get_offset(comp);
314 let stride = vb.get_stride(comp);
315 let (_, h) = vb.get_dimensions(comp);
316 let data = vb.get_data_mut().unwrap();
317 let mut line0 = vec![0; stride];
318 let mut line1 = vec![0; stride];
319 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
320 }
321 },
322 NABufferType::Video16(ref mut vb) => {
323 let ncomp = vb.get_num_components();
324 for comp in 0..ncomp {
325 let off = vb.get_offset(comp);
326 let stride = vb.get_stride(comp);
327 let (_, h) = vb.get_dimensions(comp);
328 let data = vb.get_data_mut().unwrap();
329 let mut line0 = vec![0; stride];
330 let mut line1 = vec![0; stride];
331 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
332 }
333 },
334 NABufferType::Video32(ref mut vb) => {
335 let ncomp = vb.get_num_components();
336 for comp in 0..ncomp {
337 let off = vb.get_offset(comp);
338 let stride = vb.get_stride(comp);
339 let (_, h) = vb.get_dimensions(comp);
340 let data = vb.get_data_mut().unwrap();
341 let mut line0 = vec![0; stride];
342 let mut line1 = vec![0; stride];
343 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
344 }
345 },
346 NABufferType::VideoPacked(ref mut vb) => {
347 let ncomp = vb.get_num_components();
348 for comp in 0..ncomp {
349 let off = vb.get_offset(comp);
350 let stride = vb.get_stride(comp);
351 let (_, h) = vb.get_dimensions(comp);
352 let data = vb.get_data_mut().unwrap();
353 let mut line0 = vec![0; stride];
354 let mut line1 = vec![0; stride];
355 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
356 }
357 },
358 _ => { return Err(ScaleError::InvalidArgument); },
359 };
360 Ok(())
361}
362
03accf76 363impl NAScale {
30d57e4a 364 /// Constructs a new `NAScale` instance.
03accf76
KS
365 pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
366 let pipeline;
367 let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
368 if fmt_in != fmt_out {
369 pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert)?;
370 } else {
371 pipeline = None;
372 }
373 Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
374 }
30d57e4a 375 /// Checks whether requested conversion operation is needed at all.
03accf76 376 pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
30d57e4a 377 /// Returns the input image format.
03accf76 378 pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
30d57e4a 379 /// Returns the output image format.
03accf76 380 pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
30d57e4a 381 /// Performs the image format conversion.
03accf76
KS
382 pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
383 let in_info = pic_in.get_video_info();
384 let out_info = pic_out.get_video_info();
385 if in_info.is_none() || out_info.is_none() { return Err(ScaleError::InvalidArgument); }
386 let in_info = in_info.unwrap();
387 let out_info = out_info.unwrap();
388 if self.just_convert &&
389 (in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
390 return Err(ScaleError::InvalidArgument);
391 }
085742a3 392 let needs_flip = in_info.is_flipped() ^ out_info.is_flipped();
03accf76
KS
393 check_format(in_info, &self.fmt_in, self.just_convert)?;
394 check_format(out_info, &self.fmt_out, self.just_convert)?;
085742a3
KS
395 let ret = if let Some(ref mut pipe) = self.pipeline {
396 pipe.process(pic_in, pic_out)
397 } else {
398 copy(pic_in, pic_out);
399 Ok(())
400 };
401 if ret.is_ok() && needs_flip {
402 flip_picture(pic_out)?;
03accf76 403 }
085742a3 404 ret
03accf76
KS
405 }
406}
407
#[cfg(test)]
mod test {
    use super::*;

    // Fills every sample of the buffer with `val`
    // (widened appropriately for 16- and 32-bit buffers).
    fn fill_pic(pic: &mut NABufferType, val: u8) {
        if let Some(ref mut buf) = pic.get_vbuf() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val; }
        } else if let Some(ref mut buf) = pic.get_vbuf16() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val as u16; }
        } else if let Some(ref mut buf) = pic.get_vbuf32() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = (val as u32) * 0x01010101; }
        }
    }
    #[test]
    fn test_convert() {
        // RGB565 -> RGB24 repacking on a single pixel.
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 0x0);
        assert_eq!(odata[1], 0x4);
        assert_eq!(odata[2], 0x52);

        // RGB24 -> YUV420 and back: grey input must round-trip exactly.
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 42);
        // chroma of a grey pixel should be 0x80 give or take rounding
        assert!(((odata[uoff] ^ 0x80) as i8).abs() <= 1);
        assert!(((odata[voff] ^ 0x80) as i8).abs() <= 1);
        let mut scaler = NAScale::new(ofmt, ifmt).unwrap();
        scaler.convert(&out_pic, &mut in_pic).unwrap();
        let obuf = in_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    #[test]
    fn test_scale() {
        // Same format, different size: pure upscaling of constant data.
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf16().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    #[test]
    fn test_scale_and_convert() {
        // Combined rescale plus RGB565 -> YUV420 conversion.
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 28);
        assert_eq!(odata[uoff], 154);
        assert_eq!(odata[voff], 103);
    }
    #[test]
    fn test_scale_and_convert_to_pal() {
        // Combined rescale plus YUV420 -> palettised RGB conversion.
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 142);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, PAL8_FORMAT), 0).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let dataoff = obuf.get_offset(0);
        let paloff = obuf.get_offset(1);
        let odata = obuf.get_data();
        assert_eq!(odata[dataoff], 0);
        assert_eq!(odata[paloff], 157);
        assert_eq!(odata[paloff + 1], 99);
        assert_eq!(odata[paloff + 2], 170);
    }
}