+//! Image conversion functionality.
+
+//! # Examples
+//!
+//! Convert input image into YUV one and scale down two times.
+//! ```no_run
+//! use nihav_core::scale::*;
+//! use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT};
+//! use nihav_core::frame::{alloc_video_buffer, NAVideoInfo};
+//!
+//! let mut in_pic = alloc_video_buffer(NAVideoInfo::new(640, 480, false, RGB24_FORMAT), 4).unwrap();
+//! let mut out_pic = alloc_video_buffer(NAVideoInfo::new(320, 240, false, YUV420_FORMAT), 4).unwrap();
+//! let in_fmt = get_scale_fmt_from_pic(&in_pic);
+//! let out_fmt = get_scale_fmt_from_pic(&out_pic);
+//! let mut scaler = NAScale::new(in_fmt, out_fmt).unwrap();
+//! scaler.convert(&in_pic, &mut out_pic).unwrap();
+//! ```
use crate::frame::*;
mod kernel;
mod repack;
mod scale;
/// Image format information used by the converter.
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
    /// Pixel format description.
    pub fmt: NAPixelFormaton,
    /// Picture width in pixels.
    pub width: usize,
    /// Picture height in pixels.
    pub height: usize,
}
}
}
/// A list specifying general image conversion errors.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum ScaleError {
    /// Input or output buffer contains no image data.
    NoFrame,
    /// Buffer allocation failed.
    AllocError,
    /// Invalid argument was supplied (e.g. a buffer that is not a video buffer).
    InvalidArgument,
    /// Requested feature is not implemented.
    NotImplemented,
    /// Internal implementation bug.
    Bug,
}

/// A specialised `Result` type for image conversion operations.
pub type ScaleResult<T> = Result<T, ScaleError>;
/*trait Kernel {
struct KernelDesc {
name: &'static str,
- create: fn () -> Box<kernel::Kernel>,
+ create: fn () -> Box<dyn kernel::Kernel>,
}
impl KernelDesc {
- fn find(name: &str) -> ScaleResult<Box<kernel::Kernel>> {
+ fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
for kern in KERNELS.iter() {
if kern.name == name {
return Ok((kern.create)());
fmt_out: ScaleInfo,
tmp_pic: NABufferType,
next: Option<Box<Stage>>,
- worker: Box<kernel::Kernel>,
+ worker: Box<dyn kernel::Kernel>,
}
+/// Converts input picture information into format used by scaler.
pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
let info = pic.get_video_info().unwrap();
ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
}
}
+/// Image format converter.
pub struct NAScale {
fmt_in: ScaleInfo,
fmt_out: ScaleInfo,
fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
{
if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
- let sdata = sbuf.get_data();
- let ddata = dbuf.get_data_mut().unwrap();
- ddata.copy_from_slice(&sdata[0..]);
+ let mut same = true;
+ let num_components = sbuf.get_info().get_format().get_num_comp();
+ for i in 0..num_components {
+ if sbuf.get_stride(i) != dbuf.get_stride(i) {
+ same = false;
+ break;
+ }
+ if sbuf.get_offset(i) != dbuf.get_offset(i) {
+ same = false;
+ break;
+ }
+ }
+ if same {
+ let sdata = sbuf.get_data();
+ let ddata = dbuf.get_data_mut().unwrap();
+ ddata.copy_from_slice(&sdata[0..]);
+ } else {
+ let sdata = sbuf.get_data();
+ for comp in 0..num_components {
+ let (_, h) = sbuf.get_dimensions(comp);
+ let src = &sdata[sbuf.get_offset(comp)..];
+ let sstride = sbuf.get_stride(comp);
+ let doff = dbuf.get_offset(comp);
+ let dstride = dbuf.get_stride(comp);
+ let ddata = dbuf.get_data_mut().unwrap();
+ let dst = &mut ddata[doff..];
+ let copy_size = sstride.min(dstride);
+ for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
+ (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
+ }
+ }
+ }
} else {
unimplemented!();
}
let outname = ofmt.fmt.get_model().get_short_name();
println!("convert {} -> {}", ifmt, ofmt);
- let mut needs_scale = !just_convert;
- if (ofmt.fmt.get_max_subsampling() > 0) &&
+ let needs_scale = if (ofmt.fmt.get_max_subsampling() > 0) &&
(ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
- needs_scale = true;
- }
- let needs_unpack = needs_scale || !ifmt.fmt.is_unpacked();
+ true
+ } else {
+ !just_convert
+ };
+ let needs_unpack = !ifmt.fmt.is_unpacked();
let needs_pack = !ofmt.fmt.is_unpacked();
- let mut needs_convert = false;
- if inname != outname {
- needs_convert = true;
- }
+ let needs_convert = inname != outname;
let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
&& (ofmt.fmt.get_max_subsampling() == 0);
//todo stages for model and gamma conversion
if needs_unpack {
println!("[adding unpack]");
- let new_stage;
- if !cur_fmt.fmt.is_paletted() {
- new_stage = Stage::new("unpack", &cur_fmt, &ofmt)?;
- } else {
- new_stage = Stage::new("depal", &cur_fmt, &ofmt)?;
- }
+ let new_stage = if !cur_fmt.fmt.is_paletted() {
+ Stage::new("unpack", &cur_fmt, &ofmt)?
+ } else {
+ Stage::new("depal", &cur_fmt, &ofmt)?
+ };
cur_fmt = new_stage.fmt_out;
add_stage!(stages, new_stage);
}
cur_fmt = new_stage.fmt_out;
add_stage!(stages, new_stage);
}
-//todo flip if needed
if needs_pack {
println!("[adding pack]");
let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
if let Some(ref mut head) = stages {
head.drop_last_tmp();
}
-
+
Ok(stages)
}
/// Flips one picture plane vertically in place.
///
/// `data` must hold at least `stride * h` elements starting at the plane origin;
/// `line0` and `line1` are caller-provided scratch buffers of `stride` elements
/// used to exchange a pair of rows without extra allocations.
fn swap_plane<T: Copy>(data: &mut [T], stride: usize, h: usize, line0: &mut [T], line1: &mut [T]) {
    // Guard degenerate planes: with h == 0 the original `stride * (h - 1)`
    // underflows usize (panic in debug builds); with h == 1 or stride == 0
    // there is nothing to swap anyway.
    if h < 2 || stride == 0 {
        return;
    }
    let mut doff0 = 0;                  // offset of the current top row
    let mut doff1 = stride * (h - 1);   // offset of the current bottom row
    for _ in 0..h / 2 {
        line0.copy_from_slice(&data[doff0..][..stride]);
        line1.copy_from_slice(&data[doff1..][..stride]);
        data[doff1..][..stride].copy_from_slice(line0);
        data[doff0..][..stride].copy_from_slice(line1);
        doff0 += stride;
        doff1 -= stride;
    }
}
+
+/// Flips the picture contents.
+pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
+ match pic {
+ NABufferType::Video(ref mut vb) => {
+ let ncomp = vb.get_num_components();
+ for comp in 0..ncomp {
+ let off = vb.get_offset(comp);
+ let stride = vb.get_stride(comp);
+ let (_, h) = vb.get_dimensions(comp);
+ let data = vb.get_data_mut().unwrap();
+ let mut line0 = vec![0; stride];
+ let mut line1 = vec![0; stride];
+ swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
+ }
+ },
+ NABufferType::Video16(ref mut vb) => {
+ let ncomp = vb.get_num_components();
+ for comp in 0..ncomp {
+ let off = vb.get_offset(comp);
+ let stride = vb.get_stride(comp);
+ let (_, h) = vb.get_dimensions(comp);
+ let data = vb.get_data_mut().unwrap();
+ let mut line0 = vec![0; stride];
+ let mut line1 = vec![0; stride];
+ swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
+ }
+ },
+ NABufferType::Video32(ref mut vb) => {
+ let ncomp = vb.get_num_components();
+ for comp in 0..ncomp {
+ let off = vb.get_offset(comp);
+ let stride = vb.get_stride(comp);
+ let (_, h) = vb.get_dimensions(comp);
+ let data = vb.get_data_mut().unwrap();
+ let mut line0 = vec![0; stride];
+ let mut line1 = vec![0; stride];
+ swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
+ }
+ },
+ NABufferType::VideoPacked(ref mut vb) => {
+ let ncomp = vb.get_num_components();
+ for comp in 0..ncomp {
+ let off = vb.get_offset(comp);
+ let stride = vb.get_stride(comp);
+ let (_, h) = vb.get_dimensions(comp);
+ let data = vb.get_data_mut().unwrap();
+ let mut line0 = vec![0; stride];
+ let mut line1 = vec![0; stride];
+ swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
+ }
+ },
+ _ => { return Err(ScaleError::InvalidArgument); },
+ };
+ Ok(())
+}
+
impl NAScale {
+ /// Constructs a new `NAScale` instance.
pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
let pipeline;
let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
}
Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
}
+ /// Checks whether requested conversion operation is needed at all.
pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
+ /// Returns the input image format.
pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
+ /// Returns the output image format.
pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
+ /// Performs the image format conversion.
pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
let in_info = pic_in.get_video_info();
let out_info = pic_out.get_video_info();
(in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
return Err(ScaleError::InvalidArgument);
}
+ let needs_flip = in_info.is_flipped() ^ out_info.is_flipped();
check_format(in_info, &self.fmt_in, self.just_convert)?;
check_format(out_info, &self.fmt_out, self.just_convert)?;
- if let Some(ref mut pipe) = self.pipeline {
- pipe.process(pic_in, pic_out)
- } else {
- copy(pic_in, pic_out);
- Ok(())
+ let ret = if let Some(ref mut pipe) = self.pipeline {
+ pipe.process(pic_in, pic_out)
+ } else {
+ copy(pic_in, pic_out);
+ Ok(())
+ };
+ if ret.is_ok() && needs_flip {
+ flip_picture(pic_out)?;
}
+ ret
}
}