diff --git a/nihav-core/src/scale/mod.rs b/nihav-core/src/scale/mod.rs
new file mode 100644
index 0000000..c569b2f
--- /dev/null
+++ b/nihav-core/src/scale/mod.rs
@@ -0,0 +1,348 @@
+use crate::frame::*;
+
+mod kernel;
+
+mod colorcvt;
+mod repack;
+mod scale;
+
+#[derive(Clone,Copy,PartialEq)]
+pub struct ScaleInfo {
+    pub fmt: NAPixelFormaton,
+    pub width: usize,
+    pub height: usize,
+}
+
+impl std::fmt::Display for ScaleInfo {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        write!(f, "({}x{}, {})", self.width, self.height, self.fmt)
+    }
+}
+
+#[derive(Debug,Clone,Copy,PartialEq)]
+#[allow(dead_code)]
+pub enum ScaleError {
+    NoFrame,
+    AllocError,
+    InvalidArgument,
+    NotImplemented,
+    Bug,
+}
+
+pub type ScaleResult<T> = Result<T, ScaleError>;
+
+/*trait Kernel {
+    fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType>;
+    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType);
+}*/
+
+struct KernelDesc {
+    name: &'static str,
+    create: fn () -> Box<dyn kernel::Kernel>,
+}
+
+impl KernelDesc {
+    fn find(name: &str) -> ScaleResult<Box<dyn kernel::Kernel>> {
+        for kern in KERNELS.iter() {
+            if kern.name == name {
+                return Ok((kern.create)());
+            }
+        }
+        Err(ScaleError::InvalidArgument)
+    }
+}
+
+const KERNELS: &[KernelDesc] = &[
+    KernelDesc { name: "pack",       create: repack::create_pack },
+    KernelDesc { name: "unpack",     create: repack::create_unpack },
+    KernelDesc { name: "depal",      create: repack::create_depal },
+    KernelDesc { name: "scale",      create: scale::create_scale },
+    KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
+    KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
+];
+
+struct Stage {
+    fmt_out: ScaleInfo,
+    tmp_pic: NABufferType,
+    next: Option<Box<Stage>>,
+    worker: Box<dyn kernel::Kernel>,
+}
+
+pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
+    let info = pic.get_video_info().unwrap();
+    ScaleInfo { fmt: info.get_format(), width: info.get_width(), height: info.get_height() }
+}
+
+impl Stage {
+    fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<Self> {
+        let mut worker = KernelDesc::find(name)?;
+        let tmp_pic = worker.init(in_fmt, dest_fmt)?;
+        let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
+        Ok(Self { fmt_out, tmp_pic, next: None, worker })
+    }
+    fn add(&mut self, new: Stage) {
+        if let Some(ref mut next) = self.next {
+            next.add(new);
+        } else {
+            self.next = Some(Box::new(new));
+        }
+    }
+    fn process(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
+        if let Some(ref mut nextstage) = self.next {
+            self.worker.process(pic_in, &mut self.tmp_pic);
+            nextstage.process(&self.tmp_pic, pic_out)?;
+        } else {
+            self.worker.process(pic_in, pic_out);
+        }
+        Ok(())
+    }
+    fn drop_last_tmp(&mut self) {
+        if let Some(ref mut nextstage) = self.next {
+            nextstage.drop_last_tmp();
+        } else {
+            self.tmp_pic = NABufferType::None;
+        }
+    }
+}
+
+pub struct NAScale {
+    fmt_in: ScaleInfo,
+    fmt_out: ScaleInfo,
+    just_convert: bool,
+    pipeline: Option<Stage>,
+}
+
+fn check_format(in_fmt: NAVideoInfo, ref_fmt: &ScaleInfo, just_convert: bool) -> ScaleResult<()> {
+    if in_fmt.get_format() != ref_fmt.fmt { return Err(ScaleError::InvalidArgument); }
+    if !just_convert && (in_fmt.get_width() != ref_fmt.width || in_fmt.get_height() != ref_fmt.height) {
+        return Err(ScaleError::InvalidArgument);
+    }
+    Ok(())
+}
+
+fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
+{
+    if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf(), pic_out.get_vbuf()) {
+        let sdata = sbuf.get_data();
+        let ddata = dbuf.get_data_mut().unwrap();
+        ddata.copy_from_slice(&sdata[0..]);
+    } else {
+        unimplemented!();
+    }
+}
+
+macro_rules! add_stage {
+    ($head:expr, $new:expr) => {
+        if let Some(ref mut h) = $head {
+            h.add($new);
+        } else {
+            $head = Some($new);
+        }
+    };
+}
+fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
+    if (a.width >= b.width) && (a.height >= b.height) {
+        return true;
+    }
+    if a.fmt.get_max_depth() > b.fmt.get_max_depth() {
+        return true;
+    }
+    if a.fmt.get_max_subsampling() < b.fmt.get_max_subsampling() {
+        return true;
+    }
+    false
+}
+fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool) -> ScaleResult<Option<Stage>> {
+    let inname  = ifmt.fmt.get_model().get_short_name();
+    let outname = ofmt.fmt.get_model().get_short_name();
+
+println!("convert {} -> {}", ifmt, ofmt);
+    let mut needs_scale = !just_convert;
+    if (ofmt.fmt.get_max_subsampling() > 0) &&
+        (ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
+        needs_scale = true;
+    }
+    let needs_unpack = needs_scale || !ifmt.fmt.is_unpacked();
+    let needs_pack = !ofmt.fmt.is_unpacked();
+    let mut needs_convert = false;
+    if inname != outname {
+        needs_convert = true;
+    }
+    let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
+        && (ofmt.fmt.get_max_subsampling() == 0);
+//todo stages for model and gamma conversion
+
+    let mut stages: Option<Stage> = None;
+    let mut cur_fmt = *ifmt;
+
+    if needs_unpack {
+println!("[adding unpack]");
+        let new_stage;
+        if !cur_fmt.fmt.is_paletted() {
+            new_stage = Stage::new("unpack", &cur_fmt, &ofmt)?;
+        } else {
+            new_stage = Stage::new("depal", &cur_fmt, &ofmt)?;
+        }
+        cur_fmt = new_stage.fmt_out;
+        add_stage!(stages, new_stage);
+    }
+    if needs_scale && scale_before_cvt {
+println!("[adding scale]");
+        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
+        cur_fmt = new_stage.fmt_out;
+        add_stage!(stages, new_stage);
+    }
+    if needs_convert {
+println!("[adding convert]");
+        let cvtname = format!("{}_to_{}", inname, outname);
+println!("[{}]", cvtname);
+        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt)?;
+//todo if fails try converting via RGB or YUV
+        cur_fmt = new_stage.fmt_out;
+        add_stage!(stages, new_stage);
+//todo alpha plane copy/add
+    }
+    if needs_scale && !scale_before_cvt {
+println!("[adding scale]");
+        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
+        cur_fmt = new_stage.fmt_out;
+        add_stage!(stages, new_stage);
+    }
+//todo flip if needed
+    if needs_pack {
+println!("[adding pack]");
+        let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
+        //cur_fmt = new_stage.fmt_out;
+        add_stage!(stages, new_stage);
+    }
+
+    if let Some(ref mut head) = stages {
+        head.drop_last_tmp();
+    }
+
+    Ok(stages)
+}
+
+impl NAScale {
+    pub fn new(fmt_in: ScaleInfo, fmt_out: ScaleInfo) -> ScaleResult<Self> {
+        let pipeline;
+        let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
+        if fmt_in != fmt_out {
+            pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert)?;
+        } else {
+            pipeline = None;
+        }
+        Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
+    }
+    pub fn needs_processing(&self) -> bool { self.pipeline.is_some() }
+    pub fn get_in_fmt(&self) -> ScaleInfo { self.fmt_in }
+    pub fn get_out_fmt(&self) -> ScaleInfo { self.fmt_out }
+    pub fn convert(&mut self, pic_in: &NABufferType, pic_out: &mut NABufferType) -> ScaleResult<()> {
+        let in_info  = pic_in.get_video_info();
+        let out_info = pic_out.get_video_info();
+        if in_info.is_none() || out_info.is_none() { return Err(ScaleError::InvalidArgument); }
+        let in_info  = in_info.unwrap();
+        let out_info = out_info.unwrap();
+        if self.just_convert &&
+            (in_info.get_width() != out_info.get_width() || in_info.get_height() != out_info.get_height()) {
+            return Err(ScaleError::InvalidArgument);
+        }
+        check_format(in_info, &self.fmt_in, self.just_convert)?;
+        check_format(out_info, &self.fmt_out, self.just_convert)?;
+        if let Some(ref mut pipe) = self.pipeline {
+            pipe.process(pic_in, pic_out)
+        } else {
+            copy(pic_in, pic_out);
+            Ok(())
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    fn fill_pic(pic: &mut NABufferType, val: u8) {
+        if let Some(ref mut buf) = pic.get_vbuf() {
+            let data = buf.get_data_mut().unwrap();
+            for el in data.iter_mut() { *el = val; }
+        } else if let Some(ref mut buf) = pic.get_vbuf16() {
+            let data = buf.get_data_mut().unwrap();
+            for el in data.iter_mut() { *el = val as u16; }
+        } else if let Some(ref mut buf) = pic.get_vbuf32() {
+            let data = buf.get_data_mut().unwrap();
+            for el in data.iter_mut() { *el = (val as u32) * 0x01010101; }
+        }
+    }
+    #[test]
+    fn test_convert() {
+        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB565_FORMAT), 3).unwrap();
+        fill_pic(&mut in_pic, 42);
+        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(1, 1, false, RGB24_FORMAT), 3).unwrap();
+        fill_pic(&mut out_pic, 0);
+        let ifmt = get_scale_fmt_from_pic(&in_pic);
+        let ofmt = get_scale_fmt_from_pic(&out_pic);
+        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+        scaler.convert(&in_pic, &mut out_pic).unwrap();
+        let obuf = out_pic.get_vbuf().unwrap();
+        let odata = obuf.get_data();
+        assert_eq!(odata[0], 0x0);
+        assert_eq!(odata[1], 0x4);
+        assert_eq!(odata[2], 0x52);
+
+        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, RGB24_FORMAT), 3).unwrap();
+        fill_pic(&mut in_pic, 42);
+        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
+        fill_pic(&mut out_pic, 0);
+        let ifmt = get_scale_fmt_from_pic(&in_pic);
+        let ofmt = get_scale_fmt_from_pic(&out_pic);
+        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+        scaler.convert(&in_pic, &mut out_pic).unwrap();
+        let obuf = out_pic.get_vbuf().unwrap();
+        let yoff = obuf.get_offset(0);
+        let uoff = obuf.get_offset(1);
+        let voff = obuf.get_offset(2);
+        let odata = obuf.get_data();
+        assert_eq!(odata[yoff], 42);
+        assert!(((odata[uoff] ^ 0x80) as i8).abs() <= 1);
+        assert!(((odata[voff] ^ 0x80) as i8).abs() <= 1);
+        let mut scaler = NAScale::new(ofmt, ifmt).unwrap();
+        scaler.convert(&out_pic, &mut in_pic).unwrap();
+        let obuf = in_pic.get_vbuf().unwrap();
+        let odata = obuf.get_data();
+        assert_eq!(odata[0], 42);
+    }
+    #[test]
+    fn test_scale() {
+        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(2, 2, false, RGB565_FORMAT), 3).unwrap();
+        fill_pic(&mut in_pic, 42);
+        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(3, 3, false, RGB565_FORMAT), 3).unwrap();
+        fill_pic(&mut out_pic, 0);
+        let ifmt = get_scale_fmt_from_pic(&in_pic);
+        let ofmt = get_scale_fmt_from_pic(&out_pic);
+        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+        scaler.convert(&in_pic, &mut out_pic).unwrap();
+        let obuf = out_pic.get_vbuf16().unwrap();
+        let odata = obuf.get_data();
+        assert_eq!(odata[0], 42);
+    }
+    #[test]
+    fn test_scale_and_convert() {
+        let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, RGB565_FORMAT), 3).unwrap();
+        fill_pic(&mut in_pic, 42);
+        let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, YUV420_FORMAT), 3).unwrap();
+        fill_pic(&mut out_pic, 0);
+        let ifmt = get_scale_fmt_from_pic(&in_pic);
+        let ofmt = get_scale_fmt_from_pic(&out_pic);
+        let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+        scaler.convert(&in_pic, &mut out_pic).unwrap();
+        let obuf = out_pic.get_vbuf().unwrap();
+        let yoff = obuf.get_offset(0);
+        let uoff = obuf.get_offset(1);
+        let voff = obuf.get_offset(2);
+        let odata = obuf.get_data();
+        assert_eq!(odata[yoff], 28);
+        assert_eq!(odata[uoff], 154);
+        assert_eq!(odata[voff], 103);
+    }
+}
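
For reference, a minimal sketch of how this converter is driven from outside the module, following the same pattern as the tests above. It assumes the crate exposes this module as nihav_core::scale and re-exports the frame helpers used here (alloc_video_buffer, NAVideoInfo, RGB24_FORMAT, YUV420_FORMAT) from nihav_core::frame; the function name and the fixed alignment of 3 are illustrative, not part of the API.

use nihav_core::frame::*;
use nihav_core::scale::*;

fn rgb24_to_yuv420(width: usize, height: usize) -> ScaleResult<NABufferType> {
    // Allocate source and destination pictures; 3 is the alignment used by the tests above.
    let src = alloc_video_buffer(NAVideoInfo::new(width, height, false, RGB24_FORMAT), 3)
        .map_err(|_| ScaleError::AllocError)?;
    let mut dst = alloc_video_buffer(NAVideoInfo::new(width, height, false, YUV420_FORMAT), 3)
        .map_err(|_| ScaleError::AllocError)?;

    // Describe both ends of the conversion and let NAScale build the stage pipeline
    // (unpack -> scale -> colour convert -> pack, as needed).
    let ifmt = get_scale_fmt_from_pic(&src);
    let ofmt = get_scale_fmt_from_pic(&dst);
    let mut scaler = NAScale::new(ifmt, ofmt)?;
    scaler.convert(&src, &mut dst)?;
    Ok(dst)
}

The caller is expected to fill src with image data before the convert() call; when input and output formats are identical the pipeline is empty and convert() degenerates to a plain copy.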