mod colorcvt;
mod repack;
+#[allow(clippy::module_inception)]
mod scale;
+mod palette;
+
+pub use crate::scale::palette::{palettise_frame, QuantisationMode, PaletteSearchMode};
+
/// Image format information used by the converter.
#[derive(Clone,Copy,PartialEq)]
pub struct ScaleInfo {
KernelDesc { name: "pack", create: repack::create_pack },
KernelDesc { name: "unpack", create: repack::create_unpack },
KernelDesc { name: "depal", create: repack::create_depal },
+ KernelDesc { name: "palette", create: palette::create_palettise },
KernelDesc { name: "scale", create: scale::create_scale },
KernelDesc { name: "rgb_to_yuv", create: colorcvt::create_rgb2yuv },
KernelDesc { name: "yuv_to_rgb", create: colorcvt::create_yuv2rgb },
}
}
}
+ } else if let (Some(ref sbuf), Some(ref mut dbuf)) = (pic_in.get_vbuf16(), pic_out.get_vbuf16()) {
+ // 16-bit video buffer copy path. Two strategies:
+ // 1) if every component has the same stride AND offset in source and
+ //    destination, the whole backing slice is copied in one call;
+ // 2) otherwise each component is copied line by line, trimming each
+ //    line to the smaller of the two strides.
+ let mut same = true;
+ let num_components = sbuf.get_info().get_format().get_num_comp();
+ for i in 0..num_components {
+ if sbuf.get_stride(i) != dbuf.get_stride(i) {
+ same = false;
+ break;
+ }
+ if sbuf.get_offset(i) != dbuf.get_offset(i) {
+ same = false;
+ break;
+ }
+ }
+ if same {
+ // Layouts match exactly — bulk copy of the entire data slice.
+ // NOTE(review): assumes both buffers have equal total length;
+ // copy_from_slice panics otherwise — confirm against the allocator.
+ let sdata = sbuf.get_data();
+ let ddata = dbuf.get_data_mut().unwrap();
+ ddata.copy_from_slice(&sdata[0..]);
+ } else {
+ // Layouts differ — copy per component, per line.
+ let sdata = sbuf.get_data();
+ for comp in 0..num_components {
+ // Only the height is needed; width is implied by the stride trim below.
+ let (_, h) = sbuf.get_dimensions(comp);
+ let src = &sdata[sbuf.get_offset(comp)..];
+ let sstride = sbuf.get_stride(comp);
+ let doff = dbuf.get_offset(comp);
+ let dstride = dbuf.get_stride(comp);
+ let ddata = dbuf.get_data_mut().unwrap();
+ let dst = &mut ddata[doff..];
+ // Copy no more than either buffer's line length to stay in bounds.
+ let copy_size = sstride.min(dstride);
+ for (dline, sline) in dst.chunks_exact_mut(dstride).take(h).zip(src.chunks_exact(sstride)) {
+ (&mut dline[..copy_size]).copy_from_slice(&sline[..copy_size]);
+ }
+ }
+ }
} else {
unimplemented!();
}
let needs_convert = inname != outname;
let scale_before_cvt = is_better_fmt(&ifmt, &ofmt) && needs_convert
&& (ofmt.fmt.get_max_subsampling() == 0);
+ let needs_palettise = ofmt.fmt.palette;
//todo stages for model and gamma conversion
let mut stages: Option<Stage> = None;
cur_fmt = new_stage.fmt_out;
add_stage!(stages, new_stage);
}
- if needs_pack {
+ if needs_pack && !needs_palettise {
println!("[adding pack]");
let new_stage = Stage::new("pack", &cur_fmt, &ofmt)?;
//cur_fmt = new_stage.fmt_out;
add_stage!(stages, new_stage);
}
+ if needs_palettise {
+println!("[adding palettise]");
+ let new_stage = Stage::new("palette", &cur_fmt, &ofmt)?;
+ //cur_fmt = new_stage.fmt_out;
+ add_stage!(stages, new_stage);
+ }
if let Some(ref mut head) = stages {
head.drop_last_tmp();
}
},
NABufferType::Video16(ref mut vb) => {
- let ncomp = vb.get_num_components();
+ let ncomp = vb.get_num_components().max(1);
for comp in 0..ncomp {
let off = vb.get_offset(comp);
let stride = vb.get_stride(comp);
}
},
NABufferType::Video32(ref mut vb) => {
- let ncomp = vb.get_num_components();
+ let ncomp = vb.get_num_components().max(1);
for comp in 0..ncomp {
let off = vb.get_offset(comp);
let stride = vb.get_stride(comp);
assert_eq!(odata[uoff], 154);
assert_eq!(odata[voff], 103);
}
+ #[test]
+ // End-to-end check of the new palettise stage: a 7x3 YUV420 frame filled
+ // with a constant value is scaled to 4x4 and converted to PAL8, then the
+ // first output pixel index and the first palette entry's RGB bytes are
+ // checked against fixed expected values.
+ // NOTE(review): the expected bytes (0 / 157,99,170) encode the current
+ // quantiser's output for this uniform input — they will change if the
+ // default QuantisationMode or PaletteSearchMode changes.
+ fn test_scale_and_convert_to_pal() {
+ let mut in_pic = alloc_video_buffer(NAVideoInfo::new(7, 3, false, YUV420_FORMAT), 3).unwrap();
+ fill_pic(&mut in_pic, 142);
+ let mut out_pic = alloc_video_buffer(NAVideoInfo::new(4, 4, false, PAL8_FORMAT), 0).unwrap();
+ fill_pic(&mut out_pic, 0);
+ let ifmt = get_scale_fmt_from_pic(&in_pic);
+ let ofmt = get_scale_fmt_from_pic(&out_pic);
+ let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+ scaler.convert(&in_pic, &mut out_pic).unwrap();
+ let obuf = out_pic.get_vbuf().unwrap();
+ // Component 0 holds the 8-bit palette indices, component 1 the palette.
+ let dataoff = obuf.get_offset(0);
+ let paloff = obuf.get_offset(1);
+ let odata = obuf.get_data();
+ // Uniform input should map every pixel to palette entry 0.
+ assert_eq!(odata[dataoff], 0);
+ // First palette entry: RGB triple produced from YUV (142,142,142).
+ assert_eq!(odata[paloff], 157);
+ assert_eq!(odata[paloff + 1], 99);
+ assert_eq!(odata[paloff + 2], 170);
+ }
}