use super::*;
use super::kernel::Kernel;
+// Index into YUV_PARAMS used when no explicit mode option is given.
+// NOTE(review): parse_yuv_mat() maps "bt601" to 4, so this presumably
+// selects the BT.601 coefficients — confirm against the full table.
+const DEFAULT_YUV: usize = 4;
+
+// Per-matrix [Kr, Kb] luma coefficient pairs, indexed by the ids
+// returned by parse_yuv_mat().
+// NOTE(review): only three rows are visible in this hunk while
+// parse_yuv_mat() produces indices up to 9 — the remaining rows are
+// presumably elided by the diff; verify the table covers every id.
const YUV_PARAMS: &[[f32; 2]] = &[
[ 0.333, 0.333 ], // RGB
[ 0.2126, 0.0722 ], // ITU-R BT709
[ 0.2627, 0.0593 ], // ITU-R BT2020
];
+/// Maps a colourspace name (as given in a scaler option value) to an
+/// index into YUV_PARAMS. Unrecognised names fall back to 2
+/// (presumably an "unspecified" entry — TODO confirm against the full
+/// YUV_PARAMS table, which is partly elided in this hunk).
+fn parse_yuv_mat(name: &str) -> usize {
+ match name {
+ "rgb" => 0,
+ "bt709" => 1,
+ "bt601" => 4,
+ "bt470" => 5,
+ "smpte170m" => 6,
+ "smpte240m" => 7,
+ "ycocg" => 8,
+ "bt2020" => 9,
+ _ => 2,
+ }
+}
+
+/*fn get_yuv_mat(id: usize) -> &'static str {
+ match id {
+ 1 => "bt709",
+ 4 => "bt601",
+ 5 => "bt470",
+ 6 => "smpte170m",
+ 7 => "smpte240m",
+ 8 => "ycocg",
+ 9 => "bt2020",
+ _ => "rgb",
+ }
+}*/
+
const BT_PAL_COEFFS: [f32; 2] = [ 0.493, 0.877 ];
const SMPTE_NTSC_COEFFS: &[f32; 4] = &[ -0.268, 0.7358, 0.4127, 0.4778 ];
+// Kernel state for RGB -> YUV conversion.
#[derive(Default)]
struct RgbToYuv {
+ // 3x3 conversion matrix filled in by make_rgb2yuv() during init().
matrix: [[f32; 3]; 3],
+ // YUV_PARAMS index selected via the "rgb2yuv.mode" option.
+ mode: usize,
}
impl RgbToYuv {
#[allow(clippy::many_single_char_names)]
impl Kernel for RgbToYuv {
- fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType> {
+ fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo, options: &[(String, String)]) -> ScaleResult<NABufferType> {
+ // Scan the caller-supplied options: "debug" (bare or "true")
+ // enables the diagnostic print below, "rgb2yuv.mode" picks the
+ // coefficient set; anything else is silently ignored.
+ let mut debug = false;
+ let mut mode = DEFAULT_YUV;
+ for (name, value) in options.iter() {
+ match (name.as_str(), value.as_str()) {
+ ("debug", "") => { debug = true; },
+ ("debug", "true") => { debug = true; },
+ ("rgb2yuv.mode", ymode) => {
+ mode = parse_yuv_mat(ymode);
+ },
+ _ => {},
+ }
+ }
+ self.mode = mode;
+
let mut df = dest_fmt.fmt;
-//todo coeff selection
- make_rgb2yuv(YUV_PARAMS[2][0], YUV_PARAMS[2][1], &mut self.matrix);
+ // Derive the conversion matrix from the selected [Kr, Kb] pair.
+ make_rgb2yuv(YUV_PARAMS[mode][0], YUV_PARAMS[mode][1], &mut self.matrix);
if let ColorModel::YUV(yuvsm) = df.get_model() {
match yuvsm {
YUVSubmodel::YCbCr => {},
chr.v_ss = 0;
}
}
-println!(" [intermediate format {}]", df);
+ // Diagnostic output is now opt-in instead of unconditional.
+ if debug {
+ println!(" [intermediate format {}]", df);
+ }
let res = alloc_video_buffer(NAVideoInfo::new(in_fmt.width, in_fmt.height, false, df), 3);
if res.is_err() { return Err(ScaleError::AllocError); }
Ok(res.unwrap())
let (y, u, v) = matrix_mul(&self.matrix, r, g, b);
dst[yoff + x] = (y as i16).max(0).min(255) as u8;
- dst[uoff + x] = ((u as i16).max(-128).min(128) + 128) as u8;
- dst[voff + x] = ((v as i16).max(-128).min(128) + 128) as u8;
+ // Clamp chroma to -128..127 before the +128 bias: the old upper
+ // bound of 128 allowed 128 + 128 = 256, which wraps to 0 in the
+ // `as u8` cast.
+ dst[uoff + x] = ((u as i16).max(-128).min(127) + 128) as u8;
+ dst[voff + x] = ((v as i16).max(-128).min(127) + 128) as u8;
}
roff += istrides[0];
goff += istrides[1];
+// Kernel state for YUV -> RGB conversion.
#[derive(Default)]
struct YuvToRgb {
+ // 3x3 conversion matrix filled in by make_yuv2rgb() during init().
matrix: [[f32; 3]; 3],
+ // YUV_PARAMS index selected via the "yuv2rgb.mode" option.
+ mode: usize,
+ // Integer lookup tables; when yscale is non-empty process() takes
+ // the table-driven path instead of the float matrix.
yscale: Vec<i16>,
r_chr: Vec<i16>,
g_u: Vec<i16>,
#[allow(clippy::many_single_char_names)]
impl Kernel for YuvToRgb {
- fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo) -> ScaleResult<NABufferType> {
+ fn init(&mut self, in_fmt: &ScaleInfo, dest_fmt: &ScaleInfo, options: &[(String, String)]) -> ScaleResult<NABufferType> {
+ // Same option handling as RgbToYuv::init() but keyed on
+ // "yuv2rgb.mode".
+ let mut debug = false;
+ let mut mode = DEFAULT_YUV;
+ for (name, value) in options.iter() {
+ match (name.as_str(), value.as_str()) {
+ ("debug", "") => { debug = true; },
+ ("debug", "true") => { debug = true; },
+ ("yuv2rgb.mode", ymode) => {
+ mode = parse_yuv_mat(ymode);
+ },
+ _ => {},
+ }
+ }
+ self.mode = mode;
+
let mut df = dest_fmt.fmt;
df.palette = false;
-//todo coeff selection
- make_yuv2rgb(YUV_PARAMS[2][0], YUV_PARAMS[2][1], &mut self.matrix);
+ // If the requested destination is not planar with 8 bits per
+ // component, substitute a planar RGB24 intermediate format,
+ // adding a fourth (alpha) plane only when both input and output
+ // formats carry alpha.
+ if !df.is_unpacked() || df.get_max_depth() != 8 || df.get_total_depth() != df.get_num_comp() as u8 * 8 {
+ df = NAPixelFormaton {
+ model: ColorModel::RGB(RGBSubmodel::RGB), components: 3,
+ comp_info: [
+ Some(NAPixelChromaton{ h_ss: 0, v_ss: 0, packed: false, depth: 8, shift: 0, comp_offs: 0, next_elem: 1 }),
+ Some(NAPixelChromaton{ h_ss: 0, v_ss: 0, packed: false, depth: 8, shift: 0, comp_offs: 1, next_elem: 1 }),
+ Some(NAPixelChromaton{ h_ss: 0, v_ss: 0, packed: false, depth: 8, shift: 0, comp_offs: 2, next_elem: 1 }),
+ None, None],
+ elem_size: 3, be: false, alpha: false, palette: false };
+ if in_fmt.fmt.alpha && dest_fmt.fmt.alpha {
+ df.alpha = true;
+ df.components = 4;
+ df.comp_info[3] = Some(NAPixelChromaton{ h_ss: 0, v_ss: 0, packed: false, depth: 8, shift: 0, comp_offs: 3, next_elem: 1 });
+ }
+ }
+ // Derive the conversion matrix from the selected [Kr, Kb] pair.
+ make_yuv2rgb(YUV_PARAMS[mode][0], YUV_PARAMS[mode][1], &mut self.matrix);
if let ColorModel::YUV(yuvsm) = in_fmt.fmt.get_model() {
match yuvsm {
YUVSubmodel::YCbCr => {},
YUVSubmodel::YIQ => {
+ // YIQ input: build the forward RGB->YIQ matrix and invert it.
+ // NOTE(review): this uses DEFAULT_YUV rather than the selected
+ // mode — presumably intentional since YIQ is tied to NTSC, but
+ // confirm that ignoring `mode` here is the desired behaviour.
- make_rgb2yuv(YUV_PARAMS[2][0], YUV_PARAMS[2][1], &mut self.matrix);
+ make_rgb2yuv(YUV_PARAMS[DEFAULT_YUV][0], YUV_PARAMS[DEFAULT_YUV][1], &mut self.matrix);
apply_ntsc_rgb2yiq(SMPTE_NTSC_COEFFS, &mut self.matrix);
invert_matrix(&mut self.matrix);
},
chr.comp_offs = i as u8;
}
}
-println!(" [intermediate format {}]", df);
+ // Diagnostic output is now opt-in instead of unconditional.
+ if debug {
+ println!(" [intermediate format {}]", df);
+ }
let res = alloc_video_buffer(NAVideoInfo::new(in_fmt.width, in_fmt.height, false, df), 3);
if res.is_err() { return Err(ScaleError::AllocError); }
Ok(res.unwrap())
let mut voff = sbuf.get_offset(2);
let src = sbuf.get_data();
let dst = dbuf.get_data_mut().unwrap();
- if self.yscale.len() > 0 {
+ // Take the table-driven path when the lookup tables were built
+ // (is_empty() is the idiomatic emptiness check — clippy len_zero).
+ if !self.yscale.is_empty() {
for y in 0..h {
for x in 0..w {
let y = self.yscale[src[yoff + x] as usize];
let mut yoff = sbuf.get_offset(0);
let src = sbuf.get_data();
let dst = dbuf.get_data_mut().unwrap();
- if self.yscale.len() > 0 {
+ // Same table-driven fast path as above.
+ if !self.yscale.is_empty() {
for _y in 0..h {
for x in 0..w {
let y = self.yscale[src[yoff + x] as usize];