cinepakenc: switch default quantisation mode to median cut
[nihav.git] / nihav-core / src / scale / mod.rs
CommitLineData
30d57e4a
KS
1//! Image conversion functionality.
2
3//! # Examples
4//!
//! Convert the input image into a YUV one and scale it down by a factor of two.
6//! ```no_run
7//! use nihav_core::scale::*;
8//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
9//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
10//!
11//! let mut in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
12//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
13//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
14//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
15//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
16//! scaler.convert(&in_pic, &mut out_pic).unwrap();
17//! ```
03accf76
KS
18use crate::frame::*;
19
20mod kernel;
21
22mod colorcvt;
23mod repack;
24mod scale;
25
4b459d0b
KS
26mod palette;
27
28pub use crate::scale::palette::{palettise_frame, QuantisationMode, PaletteSearchMode};
29
/// Image format information used by the converter.
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
    /// Pixel format description.
    pub fmt: NAPixelFormaton,
    /// Image width.
    pub width: usize,
    /// Image height.
    pub height: usize,
}
40
41impl std::fmt::Display for ScaleInfo {
42 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
43 write!(f, "({}x{}, {})", self.width, self.height, self.fmt)
44 }
45}
46
/// A list specifying general image conversion errors.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum ScaleError {
    /// Input or output buffer contains no image data.
    NoFrame,
    /// Allocation failed.
    AllocError,
    /// Invalid argument.
    InvalidArgument,
    /// Feature is not implemented.
    NotImplemented,
    /// Internal implementation bug.
    Bug,
}
62
/// A specialised `Result` type for image conversion operations.
pub type ScaleResult<T> = Result<T, ScaleError>;
65
66/*trait Kernel {
67 fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType>;
68 fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType);
69}*/
70
/// Descriptor binding a kernel name to its constructor function.
struct KernelDesc {
    // kernel name used for lookup (see `KERNELS`)
    name: &'static str,
    // constructor producing a boxed kernel instance
    create: fn () -> Box<dyn kernel::Kernel>,
}
75
76impl KernelDesc {
6011e201 77 fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
03accf76
KS
78 for kern in KERNELS.iter() {
79 if kern.name == name {
80 return Ok((kern.create)());
81 }
82 }
83 Err(ScaleError::InvalidArgument)
84 }
85}
86
/// Registry of all available conversion kernels, looked up by name.
const KERNELS: &[KernelDesc] = &[
    KernelDesc { name: "pack", create: repack::create_pack },
    KernelDesc { name: "unpack", create: repack::create_unpack },
    KernelDesc { name: "depal", create: repack::create_depal },
    KernelDesc { name: "palette", create: palette::create_palettise },
    KernelDesc { name: "scale", create: scale::create_scale },
    KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
    KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
];
96
/// A single stage of the conversion pipeline (a singly-linked chain).
struct Stage {
    // format this stage outputs
    fmt_out: ScaleInfo,
    // intermediate buffer handed to the next stage; dropped for the
    // last stage, which writes directly to the output picture
    tmp_pic: NABufferType,
    // next stage in the chain, `None` for the tail
    next: Option<Box<Stage>>,
    // kernel performing the actual work of this stage
    worker: Box<dyn kernel::Kernel>,
}
103
30d57e4a 104/// Converts input picture information into format used by scaler.
03accf76
KS
105pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
106 let info = pic.get_video_info().unwrap();
107 ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
108}
109
110impl Stage {
111 fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<Self> {
112 let mut worker = KernelDesc::find(name)?;
113 let tmp_pic = worker.init(in_fmt, dest_fmt)?;
114 let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
115 Ok(Self { fmt_out, tmp_pic, next: None, worker })
116 }
117 fn add(&mut self, new: Stage) {
118 if let Some(ref mut next) = self.next {
119 next.add(new);
120 } else {
121 self.next = Some(Box::new(new));
122 }
123 }
124 fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
125 if let Some(ref mut nextstage) = self.next {
126 self.worker.process(pic_in, &mut self.tmp_pic);
127 nextstage.process(&self.tmp_pic, pic_out)?;
128 } else {
129 self.worker.process(pic_in, pic_out);
130 }
131 Ok(())
132 }
133 fn drop_last_tmp(&mut self) {
134 if let Some(ref mut nextstage) = self.next {
135 nextstage.drop_last_tmp();
136 } else {
137 self.tmp_pic = NABufferType::None;
138 }
139 }
140}
141
/// Image format converter.
pub struct NAScale {
    // source image format
    fmt_in: ScaleInfo,
    // destination image format
    fmt_out: ScaleInfo,
    // true when input and output dimensions match (no rescaling needed)
    just_convert: bool,
    // conversion stage chain; `None` when formats are identical and the
    // picture is simply copied
    pipeline: Option<Stage>,
}
149
150fn check_format(in_fmt: NAVideoInfo, ref_fmt: &ScaleInfo, just_convert: bool) -> ScaleResult<()> {
151 if in_fmt.get_format() != ref_fmt.fmt { return Err(ScaleError::InvalidArgument); }
152 if !just_convert && (in_fmt.get_width() != ref_fmt.width || in_fmt.get_height() != ref_fmt.height) {
153 return Err(ScaleError::InvalidArgument);
154 }
155 Ok(())
156}
157
158fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
159{
160 if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
79ec1d51
KS
161 let mut same = true;
162 let num_components = sbuf.get_info().get_format().get_num_comp();
163 for i in 0..num_components {
164 if sbuf.get_stride(i) != dbuf.get_stride(i) {
165 same = false;
166 break;
167 }
168 if sbuf.get_offset(i) != dbuf.get_offset(i) {
169 same = false;
170 break;
171 }
172 }
173 if same {
174 let sdata = sbuf.get_data();
175 let ddata = dbuf.get_data_mut().unwrap();
176 ddata.copy_from_slice(&sdata[0..]);
177 } else {
178 let sdata = sbuf.get_data();
179 for comp in 0..num_components {
180 let (_, h) = sbuf.get_dimensions(comp);
181 let src = &sdata[sbuf.get_offset(comp)..];
182 let sstride = sbuf.get_stride(comp);
183 let doff = dbuf.get_offset(comp);
184 let dstride = dbuf.get_stride(comp);
185 let ddata = dbuf.get_data_mut().unwrap();
186 let dst = &mut ddata[doff..];
187 let copy_size = sstride.min(dstride);
188 for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
189 (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
190 }
191 }
192 }
03accf76
KS
193 } else {
194 unimplemented!();
195 }
196}
197
/// Appends stage `$new` to pipeline head `$head`, starting a new chain
/// when the pipeline is still empty.
macro_rules! add_stage {
    ($head:expr, $new:expr) => {
        if let Some(ref mut h) = $head {
            h.add($new);
        } else {
            $head = Some($new);
        }
    };
}
207fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
208 if (a.width >= b.width) && (a.height >= b.height) {
209 return true;
210 }
211 if a.fmt.get_max_depth() > b.fmt.get_max_depth() {
212 return true;
213 }
214 if a.fmt.get_max_subsampling() < b.fmt.get_max_subsampling() {
215 return true;
216 }
217 false
218}
/// Builds the chain of conversion stages turning `ifmt` into `ofmt`.
///
/// Stage order is: unpack/depalettise -> (scale) -> colourspace convert
/// -> (scale) -> pack or palettise, with scaling placed before the
/// colourspace conversion when working on the larger format is cheaper.
/// Returns `Ok(None)` when no stages are required.
///
/// NOTE(review): the `println!` calls are debug traces to stdout — they
/// are observable behaviour of this library function; left untouched here.
fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool) -> ScaleResult<Option<Stage>> {
    let inname = ifmt.fmt.get_model().get_short_name();
    let outname = ofmt.fmt.get_model().get_short_name();

println!("convert {} -> {}", ifmt, ofmt);
    // scaling is needed when the caller asked for resizing or when chroma
    // subsampling differs between input and output formats
    let needs_scale = if (ofmt.fmt.get_max_subsampling() > 0) &&
        (ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
            true
        } else {
            !just_convert
        };
    let needs_unpack = !ifmt.fmt.is_unpacked();
    let needs_pack = !ofmt.fmt.is_unpacked();
    // colourspace conversion is keyed off the colour-model short name
    let needs_convert = inname != outname;
    // scale first when the input format is "better" and the output is not subsampled
    let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
        && (ofmt.fmt.get_max_subsampling() == 0);
    let needs_palettise = ofmt.fmt.palette;
//todo stages for model and gamma conversion

    let mut stages: Option<Stage> = None;
    let mut cur_fmt = *ifmt;

    if needs_unpack {
println!("[adding unpack]");
        // paletted input is expanded via "depal" instead of plain "unpack"
        let new_stage = if !cur_fmt.fmt.is_paletted() {
                Stage::new("unpack", &cur_fmt, &ofmt)?
            } else {
                Stage::new("depal", &cur_fmt, &ofmt)?
            };
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_scale && scale_before_cvt {
println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_convert {
println!("[adding convert]");
        let cvtname = format!("{}_to_{}", inname, outname);
println!("[{}]", cvtname);
        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt)?;
//todo if fails try converting via RGB or YUV
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
//todo alpha plane copy/add
    }
    if needs_scale && !scale_before_cvt {
println!("[adding scale]");
        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
        cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_pack && !needs_palettise {
println!("[adding pack]");
        let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }
    if needs_palettise {
println!("[adding palettise]");
        let new_stage = Stage::new("palette", &cur_fmt, &ofmt)?;
        //cur_fmt = new_stage.fmt_out;
        add_stage!(stages, new_stage);
    }

    // the last stage writes straight into the output picture, so its
    // intermediate buffer is never used and can be released
    if let Some(ref mut head) = stages {
        head.drop_last_tmp();
    }

    Ok(stages)
}
292
085742a3
KS
/// Flips a single plane vertically in place, using the caller-provided
/// scratch lines `line0`/`line1` (each `stride` elements long) to swap
/// rows pairwise from the outside in.
fn swap_plane<T:Copy>(data: &mut [T], stride: usize, h: usize, line0: &mut [T], line1: &mut [T]) {
    // Nothing to flip for empty or single-row planes; the early return
    // also avoids the `stride * (h - 1)` underflow panic when `h == 0`.
    if h < 2 { return; }
    let mut doff0 = 0;
    let mut doff1 = stride * (h - 1);
    for _ in 0..h/2 {
        // save both rows, then write them back exchanged
        line0.copy_from_slice(&data[doff0..][..stride]);
        line1.copy_from_slice(&data[doff1..][..stride]);
        data[doff1..][..stride].copy_from_slice(line0);
        data[doff0..][..stride].copy_from_slice(line1);
        doff0 += stride;
        doff1 -= stride;
    }
}
305
30d57e4a 306/// Flips the picture contents.
085742a3
KS
307pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
308 match pic {
309 NABufferType::Video(ref mut vb) => {
310 let ncomp = vb.get_num_components();
311 for comp in 0..ncomp {
312 let off = vb.get_offset(comp);
313 let stride = vb.get_stride(comp);
314 let (_, h) = vb.get_dimensions(comp);
315 let data = vb.get_data_mut().unwrap();
316 let mut line0 = vec![0; stride];
317 let mut line1 = vec![0; stride];
318 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
319 }
320 },
321 NABufferType::Video16(ref mut vb) => {
322 let ncomp = vb.get_num_components();
323 for comp in 0..ncomp {
324 let off = vb.get_offset(comp);
325 let stride = vb.get_stride(comp);
326 let (_, h) = vb.get_dimensions(comp);
327 let data = vb.get_data_mut().unwrap();
328 let mut line0 = vec![0; stride];
329 let mut line1 = vec![0; stride];
330 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
331 }
332 },
333 NABufferType::Video32(ref mut vb) => {
334 let ncomp = vb.get_num_components();
335 for comp in 0..ncomp {
336 let off = vb.get_offset(comp);
337 let stride = vb.get_stride(comp);
338 let (_, h) = vb.get_dimensions(comp);
339 let data = vb.get_data_mut().unwrap();
340 let mut line0 = vec![0; stride];
341 let mut line1 = vec![0; stride];
342 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
343 }
344 },
345 NABufferType::VideoPacked(ref mut vb) => {
346 let ncomp = vb.get_num_components();
347 for comp in 0..ncomp {
348 let off = vb.get_offset(comp);
349 let stride = vb.get_stride(comp);
350 let (_, h) = vb.get_dimensions(comp);
351 let data = vb.get_data_mut().unwrap();
352 let mut line0 = vec![0; stride];
353 let mut line1 = vec![0; stride];
354 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
355 }
356 },
357 _ => { return Err(ScaleError::InvalidArgument); },
358 };
359 Ok(())
360}
361
03accf76 362impl NAScale {
30d57e4a 363 /// Constructs a new `NAScale` instance.
03accf76
KS
364 pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
365 let pipeline;
366 let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
367 if fmt_in != fmt_out {
368 pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert)?;
369 } else {
370 pipeline = None;
371 }
372 Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
373 }
30d57e4a 374 /// Checks whether requested conversion operation is needed at all.
03accf76 375 pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
30d57e4a 376 /// Returns the input image format.
03accf76 377 pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
30d57e4a 378 /// Returns the output image format.
03accf76 379 pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
30d57e4a 380 /// Performs the image format conversion.
03accf76
KS
381 pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
382 let in_info = pic_in.get_video_info();
383 let out_info = pic_out.get_video_info();
384 if in_info.is_none() || out_info.is_none() { return Err(ScaleError::InvalidArgument); }
385 let in_info = in_info.unwrap();
386 let out_info = out_info.unwrap();
387 if self.just_convert &&
388 (in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
389 return Err(ScaleError::InvalidArgument);
390 }
085742a3 391 let needs_flip = in_info.is_flipped() ^ out_info.is_flipped();
03accf76
KS
392 check_format(in_info, &self.fmt_in, self.just_convert)?;
393 check_format(out_info, &self.fmt_out, self.just_convert)?;
085742a3
KS
394 let ret = if let Some(ref mut pipe) = self.pipeline {
395 pipe.process(pic_in, pic_out)
396 } else {
397 copy(pic_in, pic_out);
398 Ok(())
399 };
400 if ret.is_ok() && needs_flip {
401 flip_picture(pic_out)?;
03accf76 402 }
085742a3 403 ret
03accf76
KS
404 }
405}
406
#[cfg(test)]
mod test {
    use super::*;

    // Fills whichever buffer variant (8-, 16- or 32-bit) the picture holds
    // with a constant value (replicated per byte for the 32-bit case).
    fn fill_pic(pic: &mut NABufferType, val: u8) {
        if let Some(ref mut buf) = pic.get_vbuf() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val; }
        } else if let Some(ref mut buf) = pic.get_vbuf16() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = val as u16; }
        } else if let Some(ref mut buf) = pic.get_vbuf32() {
            let data = buf.get_data_mut().unwrap();
            for el in data.iter_mut() { *el = (val as u32) * 0x01010101; }
        }
    }
    // Format conversion without resizing: RGB565 -> RGB24 and RGB24 -> YUV420
    // round trip; expected values are fixed reference outputs.
    #[test]
    fn test_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 0x0);
        assert_eq!(odata[1], 0x4);
        assert_eq!(odata[2], 0x52);

        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, RGB24_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 42);
        // grey input: chroma must stay within one step of the 0x80 midpoint
        assert!(((odata[uoff] ^ 0x80) as i8).abs() <= 1);
        assert!(((odata[voff] ^ 0x80) as i8).abs() <= 1);
        let mut scaler = NAScale::new(ofmt, ifmt).unwrap();
        scaler.convert(&out_pic, &mut in_pic).unwrap();
        let obuf = in_pic.get_vbuf().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    // Pure upscaling (2x2 -> 3x3) with identical pixel format.
    #[test]
    fn test_scale() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf16().unwrap();
        let odata = obuf.get_data();
        assert_eq!(odata[0], 42);
    }
    // Combined resize and colourspace conversion (RGB565 -> YUV420).
    #[test]
    fn test_scale_and_convert() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, RGB565_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 42);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let yoff = obuf.get_offset(0);
        let uoff = obuf.get_offset(1);
        let voff = obuf.get_offset(2);
        let odata = obuf.get_data();
        assert_eq!(odata[yoff], 28);
        assert_eq!(odata[uoff], 154);
        assert_eq!(odata[voff], 103);
    }
    // Resize plus palettisation: checks first pixel index and palette entry.
    #[test]
    fn test_scale_and_convert_to_pal() {
        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, YUV420_FORMAT), 3).unwrap();
        fill_pic(&mut in_pic, 142);
        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, PAL8_FORMAT), 0).unwrap();
        fill_pic(&mut out_pic, 0);
        let ifmt = get_scale_fmt_from_pic(&in_pic);
        let ofmt = get_scale_fmt_from_pic(&out_pic);
        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
        scaler.convert(&in_pic, &mut out_pic).unwrap();
        let obuf = out_pic.get_vbuf().unwrap();
        let dataoff = obuf.get_offset(0);
        let paloff = obuf.get_offset(1);
        let odata = obuf.get_data();
        assert_eq!(odata[dataoff], 0);
        assert_eq!(odata[paloff], 157);
        assert_eq!(odata[paloff + 1], 99);
        assert_eq!(odata[paloff + 2], 170);
    }
}