X-Git-Url: https://git.nihav.org/?p=nihav.git;a=blobdiff_plain;f=nihav-core%2Fsrc%2Fscale%2Fmod.rs;h=6bf6e272273887dac36bad6147bd0147e34b88bc;hp=2192b187dfe14c1054baff8c412c1fb5889ea7af;hb=25e0bf9a974b1bb5a8f8b38e23b4a7e91db3eab0;hpb=b36f412c24813b14cb2b1f8fd151863e2a49c1e2

diff --git a/nihav-core/src/scale/mod.rs b/nihav-core/src/scale/mod.rs
index 2192b18..6bf6e27 100644
--- a/nihav-core/src/scale/mod.rs
+++ b/nihav-core/src/scale/mod.rs
@@ -109,9 +109,9 @@ pub fn get_scale_fmt_from_pic(pic: &NABufferType) -> ScaleInfo {
 }
 
 impl Stage {
-    fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<Self> {
+    fn new(name: &str, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo, options: &[(String, String)]) -> ScaleResult<Self> {
         let mut worker = KernelDesc::find(name)?;
-        let tmp_pic = worker.init(in_fmt, dest_fmt)?;
+        let tmp_pic = worker.init(in_fmt, dest_fmt, options)?;
         let fmt_out = get_scale_fmt_from_pic(&tmp_pic);
         Ok(Self { fmt_out, tmp_pic, next: None, worker })
     }
@@ -191,6 +191,39 @@ fn copy(pic_in: &NABufferType, pic_out: &mut NABufferType)
                 }
             }
         }
+    } else if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf16(), pic_out.get_vbuf16()) {
+        let mut same = true;
+        let num_components = sbuf.get_info().get_format().get_num_comp();
+        for i in 0..num_components {
+            if sbuf.get_stride(i) != dbuf.get_stride(i) {
+                same = false;
+                break;
+            }
+            if sbuf.get_offset(i) != dbuf.get_offset(i) {
+                same = false;
+                break;
+            }
+        }
+        if same {
+            let sdata = sbuf.get_data();
+            let ddata = dbuf.get_data_mut().unwrap();
+            ddata.copy_from_slice(&sdata[0..]);
+        } else {
+            let sdata = sbuf.get_data();
+            for comp in 0..num_components {
+                let (_, h) = sbuf.get_dimensions(comp);
+                let src = &sdata[sbuf.get_offset(comp)..];
+                let sstride = sbuf.get_stride(comp);
+                let doff = dbuf.get_offset(comp);
+                let dstride = dbuf.get_stride(comp);
+                let ddata = dbuf.get_data_mut().unwrap();
+                let dst = &mut ddata[doff..];
+                let copy_size = sstride.min(dstride);
+                for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
+                    (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
+                }
+            }
+        }
     } else {
         unimplemented!();
     }
 }
@@ -217,13 +250,32 @@ fn is_better_fmt(a: &ScaleInfo, b: &ScaleInfo) -> bool {
     }
     false
 }
 
-fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool) -> ScaleResult<Option<Stage>> {
+fn fmt_needs_scale(ifmt: &NAPixelFormaton, ofmt: &NAPixelFormaton) -> bool {
+    for (ichr, ochr) in ifmt.comp_info.iter().zip(ofmt.comp_info.iter()) {
+        if let (Some(ic), Some(oc)) = (ichr, ochr) {
+            if ic.h_ss != oc.h_ss || ic.v_ss != oc.v_ss {
+                return true;
+            }
+        }
+    }
+    false
+}
+fn build_pipeline(ifmt: &ScaleInfo, ofmt: &ScaleInfo, just_convert: bool, options: &[(String, String)]) -> ScaleResult<Option<Stage>> {
+    let mut debug = false;
+    for (name, value) in options.iter() {
+        if name == "debug" && (value == "" || value == "true") {
+            debug = true;
+            break;
+        }
+    }
+
     let inname = ifmt.fmt.get_model().get_short_name();
     let outname = ofmt.fmt.get_model().get_short_name();
-println!("convert {} -> {}", ifmt, ofmt);
-    let needs_scale = if (ofmt.fmt.get_max_subsampling() > 0) &&
-        (ofmt.fmt.get_max_subsampling() != ifmt.fmt.get_max_subsampling()) {
+    if debug {
+        println!("convert {} -> {}", ifmt, ofmt);
+    }
+    let needs_scale = if fmt_needs_scale(&ifmt.fmt, &ofmt.fmt) {
             true
         } else {
             !just_convert
@@ -240,46 +292,60 @@ println!("convert {} -> {}", ifmt, ofmt);
     let mut cur_fmt = *ifmt;
 
     if needs_unpack {
-println!("[adding unpack]");
+        if debug {
+            println!("[adding unpack]");
+        }
         let new_stage = if !cur_fmt.fmt.is_paletted() {
-                Stage::new("unpack", &cur_fmt, &ofmt)?
+                Stage::new("unpack", &cur_fmt, &ofmt, options)?
             } else {
-                Stage::new("depal", &cur_fmt, &ofmt)?
+                Stage::new("depal", &cur_fmt, &ofmt, options)?
             };
         cur_fmt = new_stage.fmt_out;
         add_stage!(stages, new_stage);
     }
     if needs_scale && scale_before_cvt {
-println!("[adding scale]");
-        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
+        if debug {
+            println!("[adding scale]");
+        }
+        let new_stage = Stage::new("scale", &cur_fmt, &ofmt, options)?;
         cur_fmt = new_stage.fmt_out;
         add_stage!(stages, new_stage);
     }
     if needs_convert {
-println!("[adding convert]");
+        if debug {
+            println!("[adding convert]");
+        }
         let cvtname = format!("{}_to_{}", inname, outname);
-println!("[{}]", cvtname);
-        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt)?;
+        if debug {
+            println!("[{}]", cvtname);
+        }
+        let new_stage = Stage::new(&cvtname, &cur_fmt, &ofmt, options)?;
 //todo if fails try converting via RGB or YUV
         cur_fmt = new_stage.fmt_out;
         add_stage!(stages, new_stage);
 //todo alpha plane copy/add
     }
     if needs_scale && !scale_before_cvt {
-println!("[adding scale]");
-        let new_stage = Stage::new("scale", &cur_fmt, &ofmt)?;
+        if debug {
+            println!("[adding scale]");
+        }
+        let new_stage = Stage::new("scale", &cur_fmt, &ofmt, options)?;
         cur_fmt = new_stage.fmt_out;
         add_stage!(stages, new_stage);
     }
     if needs_pack && !needs_palettise {
-println!("[adding pack]");
-        let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
+        if debug {
+            println!("[adding pack]");
+        }
+        let new_stage = Stage::new("pack", &cur_fmt, &ofmt, options)?;
 //cur_fmt = new_stage.fmt_out;
         add_stage!(stages, new_stage);
     }
     if needs_palettise {
-println!("[adding palettise]");
-        let new_stage = Stage::new("palette", &cur_fmt, &ofmt)?;
+        if debug {
+            println!("[adding palettise]");
+        }
+        let new_stage = Stage::new("palette", &cur_fmt, &ofmt, options)?;
 //cur_fmt = new_stage.fmt_out;
         add_stage!(stages, new_stage);
     }
@@ -320,7 +386,7 @@ pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
             }
         },
         NABufferType::Video16(ref mut vb) => {
-            let ncomp = vb.get_num_components();
+            let ncomp = vb.get_num_components().max(1);
             for comp in 0..ncomp {
                 let off = vb.get_offset(comp);
                 let stride = vb.get_stride(comp);
@@ -332,7 +398,7 @@ pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
             }
         },
         NABufferType::Video32(ref mut vb) => {
-            let ncomp = vb.get_num_components();
+            let ncomp = vb.get_num_components().max(1);
             for comp in 0..ncomp {
                 let off = vb.get_offset(comp);
                 let stride = vb.get_stride(comp);
@@ -354,6 +420,15 @@ pub fn flip_picture(pic: &mut NABufferType) -> ScaleResult<()> {
                 let mut line1 = vec![0; stride];
                 swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
             }
+            if ncomp == 0 && vb.get_stride(0) != 0 {
+                let off = vb.get_offset(0);
+                let stride = vb.get_stride(0);
+                let (_, h) = vb.get_dimensions(0);
+                let data = vb.get_data_mut().unwrap();
+                let mut line0 = vec![0; stride];
+                let mut line1 = vec![0; stride];
+                swap_plane(&mut data[off..], stride, h, line0.as_mut_slice(), line1.as_mut_slice());
+            }
         },
         _ => { return Err(ScaleError::InvalidArgument); },
     };
@@ -366,7 +441,18 @@ impl NAScale {
         let pipeline;
         let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
         if fmt_in != fmt_out {
-            pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert)?;
+            pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert, &[])?;
         } else {
             pipeline = None;
         }
         Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
     }
+    /// Constructs a new `NAScale` instance taking into account provided options.
+    pub fn new_with_options(fmt_in: ScaleInfo, fmt_out: ScaleInfo, options: &[(String, String)]) -> ScaleResult<Self> {
+        let pipeline;
+        let just_convert = (fmt_in.width == fmt_out.width) && (fmt_in.height == fmt_out.height);
+        if fmt_in != fmt_out {
+            pipeline = build_pipeline(&fmt_in, &fmt_out, just_convert, options)?;
+        } else {
+            pipeline = None;
+        }
+        Ok(Self { fmt_in, fmt_out, just_convert, pipeline })
+    }
@@ -488,9 +574,9 @@ mod test {
         let uoff = obuf.get_offset(1);
         let voff = obuf.get_offset(2);
         let odata = obuf.get_data();
-        assert_eq!(odata[yoff], 28);
-        assert_eq!(odata[uoff], 154);
-        assert_eq!(odata[voff], 103);
+        assert_eq!(odata[yoff], 11);
+        assert_eq!(odata[uoff], 162);
+        assert_eq!(odata[voff], 118);
     }
     #[test]
     fn test_scale_and_convert_to_pal() {
@@ -508,7 +594,7 @@ mod test {
         let odata = obuf.get_data();
         assert_eq!(odata[dataoff], 0);
         assert_eq!(odata[paloff], 157);
-        assert_eq!(odata[paloff + 1], 99);
+        assert_eq!(odata[paloff + 1], 129);
         assert_eq!(odata[paloff + 2], 170);
     }
 }
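
Note (not part of the patch): a minimal caller-side sketch of how the options-aware constructor added above might be used. The `ScaleInfo` fields, the `(String, String)` option pairs and the "debug" key are taken from the diff itself; the format constants (`RGB24_FORMAT`, `YUV420_FORMAT`) and the exact module paths are assumptions about the surrounding nihav-core API and may differ.

    // Hypothetical usage sketch, not part of the commit above.
    use nihav_core::formats::{RGB24_FORMAT, YUV420_FORMAT}; // assumed format constants
    use nihav_core::scale::{NAScale, ScaleInfo, ScaleResult};

    fn make_debug_scaler() -> ScaleResult<NAScale> {
        let ifmt = ScaleInfo { fmt: RGB24_FORMAT,  width: 640, height: 480 };
        let ofmt = ScaleInfo { fmt: YUV420_FORMAT, width: 320, height: 240 };
        // An empty or "true" value for "debug" makes build_pipeline() print the
        // stages it adds ("[adding unpack]", "[adding scale]", ...); other
        // (name, value) pairs are passed on to the individual kernels via init().
        let options = [("debug".to_string(), String::new())];
        NAScale::new_with_options(ifmt, ofmt, &options)
    }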