+//! Image conversion functionality.
+//!
+//! # Examples
+//!
+//! Convert an input image into a YUV one and downscale it by a factor of two.
+//! ```no_run
+//! use nihav_core::scale::*;
+//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
+//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
+//!
+//! let in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
+//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
+//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
+//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
+//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
+//! scaler.convert(&in_pic, &mut out_pic).unwrap();
+//! ```
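+//!
+//! Convert an input image into a paletted one of the same size (a minimal sketch
+//! along the lines of the palettisation test in this module; it assumes
+//! `PAL8_FORMAT` is available from `nihav_core::formats`).
+//! ```no_run
+//! use nihav_core::scale::*;
+//! use nihav_core::formats::{RGB24_FORMAT, PAL8_FORMAT};
+//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
+//!
+//! let in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
+//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, PAL8_FORMAT), 0).unwrap();
+//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
+//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
+//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
+//! scaler.convert(&in_pic, &mut out_pic).unwrap();
+//! ```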
use crate::frame::*;
mod kernel;
mod colorcvt;
mod repack;
+#[allow(clippy::module_inception)]
mod scale;
+mod palette;
+
+pub use crate::scale::palette::{palettise_frame, QuantisationMode, PaletteSearchMode};
+
+/// Image format information used by the converter.
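+///
+/// It is normally obtained from an existing picture via `get_scale_fmt_from_pic()`,
+/// but it can also be filled in directly (a minimal sketch; `RGB24_FORMAT` comes
+/// from `nihav_core::formats`):
+/// ```no_run
+/// use nihav_core::scale::ScaleInfo;
+/// use nihav_core::formats::RGB24_FORMAT;
+///
+/// let fmt = ScaleInfo { fmt: RGB24_FORMAT, width: 640, height: 480 };
+/// ```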
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
+ /// Pixel format description.
pub fmt: NAPixelFormaton,
+ /// Image width.
pub width: usize,
+ /// Image height.
pub height: usize,
}
}
}
+/// A list specifying general image conversion errors.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum ScaleError {
+ /// Input or output buffer contains no image data.
NoFrame,
+ /// Allocation failed.
AllocError,
+ /// Invalid argument.
InvalidArgument,
+ /// Feature is not implemented.
NotImplemented,
+ /// Internal implementation bug.
Bug,
}
+/// A specialised `Result` type for image conversion operations.
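+///
+/// A minimal sketch of handling the result of creating a converter (the formats
+/// and dimensions here are purely illustrative):
+/// ```no_run
+/// use nihav_core::scale::*;
+/// use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
+///
+/// let ifmt = ScaleInfo { fmt: RGB24_FORMAT,  width: 640, height: 480 };
+/// let ofmt = ScaleInfo { fmt: YUV420_FORMAT, width: 320, height: 240 };
+/// match NAScale::new(ifmt, ofmt) {
+///     Ok(_scaler)                     => println!("scaler created"),
+///     Err(ScaleError::NotImplemented) => println!("conversion is not supported"),
+///     Err(err)                        => println!("scaler creation failed: {:?}", err),
+/// }
+/// ```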
pub type ScaleResult<T> = Result<T, ScaleError>;
/*trait Kernel {
KernelDesc { name: "pack", create: repack::create_pack },
KernelDesc { name: "unpack", create: repack::create_unpack },
KernelDesc { name: "depal", create: repack::create_depal },
+ KernelDesc { name: "palette", create: palette::create_palettise },
KernelDesc { name: "scale", create: scale::create_scale },
KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
worker: Box<dyn kernel::Kernel>,
}
+/// Converts input picture information into the format used by the scaler.
pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
let info = pic.get_video_info().unwrap();
ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
}
}
+/// Image format converter.
pub struct NAScale {
fmt_in: ScaleInfo,
fmt_out: ScaleInfo,
}
}
}
+ } else if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf16(), pic_out.get_vbuf16()) {
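+        // The planes can be copied in one go only when both buffers use the same
+        // layout, i.e. matching strides and offsets for every component.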
+ let mut same = true;
+ let num_components = sbuf.get_info().get_format().get_num_comp();
+ for i in 0..num_components {
+ if sbuf.get_stride(i) != dbuf.get_stride(i) {
+ same = false;
+ break;
+ }
+ if sbuf.get_offset(i) != dbuf.get_offset(i) {
+ same = false;
+ break;
+ }
+ }
+ if same {
+ let sdata = sbuf.get_data();
+ let ddata = dbuf.get_data_mut().unwrap();
+ ddata.copy_from_slice(&sdata[0..]);
+ } else {
+ let sdata = sbuf.get_data();
+ for comp in 0..num_components {
+ let (_, h) = sbuf.get_dimensions(comp);
+ let src = &sdata[sbuf.get_offset(comp)..];
+ let sstride = sbuf.get_stride(comp);
+ let doff = dbuf.get_offset(comp);
+ let dstride = dbuf.get_stride(comp);
+ let ddata = dbuf.get_data_mut().unwrap();
+ let dst = &mut ddata[doff..];
+ let copy_size = sstride.min(dstride);
+ for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
+                    dline[..copy_size].copy_from_slice(&sline[..copy_size]);
+ }
+ }
+ }
} else {
unimplemented!();
}
let needs_convert = inname != outname;
let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
&& (ofmt.fmt.get_max_subsampling() == 0);
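+    // a paletted output format is produced by a dedicated palettise stage,
+    // which replaces the generic pack stage below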
+ let needs_palettise = ofmt.fmt.palette;
//todo stages for model and gamma conversion
let mut stages: Option<Stage> = None;
cur_fmt = new_stage.fmt_out;
add_stage!(stages, new_stage);
}
- if needs_pack {
+ if needs_pack && !needs_palettise {
println!("[adding pack]");
let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
//cur_fmt = new_stage.fmt_out;
add_stage!(stages, new_stage);
}
+ if needs_palettise {
+println!("[adding palettise]");
+ let new_stage = Stage::new("palette", &cur_fmt, &ofmt)?;
+ //cur_fmt = new_stage.fmt_out;
+ add_stage!(stages, new_stage);
+ }
if let Some(ref mut head) = stages {
head.drop_last_tmp();
}
-
+
Ok(stages)
}
}
}
+/// Flips the picture contents.
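+///
+/// A minimal sketch (the buffer helpers are the same ones used in the
+/// module-level examples):
+/// ```no_run
+/// use nihav_core::scale::flip_picture;
+/// use nihav_core::formats::YUV420_FORMAT;
+/// use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
+///
+/// let mut pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, YUV420_FORMAT), 4).unwrap();
+/// flip_picture(&mut pic).unwrap();
+/// ```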
pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
match pic {
NABufferType::Video(ref mut vb) => {
}
},
NABufferType::Video16(ref mut vb) => {
- let ncomp = vb.get_num_components();
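+            // process at least one plane even if the buffer reports zero components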
+ let ncomp = vb.get_num_components().max(1);
for comp in 0..ncomp {
let off = vb.get_offset(comp);
let stride = vb.get_stride(comp);
}
},
NABufferType::Video32(ref mut vb) => {
- let ncomp = vb.get_num_components();
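+            // process at least one plane even if the buffer reports zero components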
+ let ncomp = vb.get_num_components().max(1);
for comp in 0..ncomp {
let off = vb.get_offset(comp);
let stride = vb.get_stride(comp);
}
impl NAScale {
+ /// Constructs a new `NAScale` instance.
pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
let pipeline;
let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
}
Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
}
+    /// Checks whether the requested conversion is needed at all.
pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
+ /// Returns the input image format.
pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
+ /// Returns the output image format.
pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
+ /// Performs the image format conversion.
pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
let in_info = pic_in.get_video_info();
let out_info = pic_out.get_video_info();
assert_eq!(odata[uoff], 154);
assert_eq!(odata[voff], 103);
}
+ #[test]
+ fn test_scale_and_convert_to_pal() {
+ let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, YUV420_FORMAT), 3).unwrap();
+ fill_pic(&mut in_pic, 142);
+ let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, PAL8_FORMAT), 0).unwrap();
+ fill_pic(&mut out_pic, 0);
+ let ifmt = get_scale_fmt_from_pic(&in_pic);
+ let ofmt = get_scale_fmt_from_pic(&out_pic);
+ let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+ scaler.convert(&in_pic, &mut out_pic).unwrap();
+ let obuf = out_pic.get_vbuf().unwrap();
+ let dataoff = obuf.get_offset(0);
+ let paloff = obuf.get_offset(1);
+ let odata = obuf.get_data();
+ assert_eq!(odata[dataoff], 0);
+ assert_eq!(odata[paloff], 157);
+ assert_eq!(odata[paloff + 1], 99);
+ assert_eq!(odata[paloff + 2], 170);
+ }
}