X-Git-Url: https://git.nihav.org/?a=blobdiff_plain;ds=sidebyside;f=nihav-core%2Fsrc%2Ftest%2Fdec_video.rs;h=d000e527af733628185e4254ba13ed200a54f989;hb=01613464323864a655c994820d3c43df1954e3b2;hp=538a0ef82edf8ffd272495e6aad72270f3f45a65;hpb=171860fcc4a4ba3ec28bc4b720b9f582377be4cf;p=nihav.git

diff --git a/nihav-core/src/test/dec_video.rs b/nihav-core/src/test/dec_video.rs
index 538a0ef..d000e52 100644
--- a/nihav-core/src/test/dec_video.rs
+++ b/nihav-core/src/test/dec_video.rs
@@ -4,6 +4,7 @@ use crate::frame::*;
 use crate::codecs::*;
 use crate::demuxers::*;
 //use crate::io::byteio::*;
+use crate::scale::*;
 use super::wavwriter::WavWriter;
 
 fn write_pgmyuv(pfx: &str, strno: usize, num: u64, frm: NAFrameRef) {
@@ -123,66 +124,23 @@ fn write_palppm(pfx: &str, strno: usize, num: u64, frm: NAFrameRef) {
 fn write_ppm(pfx: &str, strno: usize, num: u64, frm: NAFrameRef) {
     let name = format!("assets/{}out{:02}_{:06}.ppm", pfx, strno, num);
     let mut ofile = File::create(name).unwrap();
-    if let NABufferType::VideoPacked(ref buf) = frm.get_buffer() {
+    let info = frm.get_buffer().get_video_info().unwrap();
+    let mut dpic = alloc_video_buffer(NAVideoInfo::new(info.get_width(), info.get_height(), false, RGB24_FORMAT), 0).unwrap();
+    let ifmt = ScaleInfo { width: info.get_width(), height: info.get_height(), fmt: info.get_format() };
+    let ofmt = ScaleInfo { width: info.get_width(), height: info.get_height(), fmt: RGB24_FORMAT };
+    let mut scaler = NAScale::new(ifmt, ofmt).unwrap();
+    scaler.convert(&frm.get_buffer(), &mut dpic).unwrap();
+    let buf = dpic.get_vbuf().unwrap();
     let (w, h) = buf.get_dimensions(0);
     let hdr = format!("P6\n{} {}\n255\n", w, h);
     ofile.write_all(hdr.as_bytes()).unwrap();
     let dta = buf.get_data();
     let stride = buf.get_stride(0);
-    let offs: [usize; 3] = [
-            buf.get_info().get_format().get_chromaton(0).unwrap().get_offset() as usize,
-            buf.get_info().get_format().get_chromaton(1).unwrap().get_offset() as usize,
-            buf.get_info().get_format().get_chromaton(2).unwrap().get_offset() as usize
-        ];
-    let step = buf.get_info().get_format().get_elem_size() as usize;
     let mut line: Vec<u8> = Vec::with_capacity(w * 3);
     line.resize(w * 3, 0);
     for src in dta.chunks(stride) {
-        for x in 0..w {
-            line[x * 3 + 0] = src[x * step + offs[0]];
-            line[x * 3 + 1] = src[x * step + offs[1]];
-            line[x * 3 + 2] = src[x * step + offs[2]];
-        }
-        ofile.write_all(line.as_slice()).unwrap();
+        ofile.write_all(&src[0..w*3]).unwrap();
     }
-    } else if let NABufferType::Video16(ref buf) = frm.get_buffer() {
-        let (w, h) = buf.get_dimensions(0);
-        let hdr = format!("P6\n{} {}\n255\n", w, h);
-        ofile.write_all(hdr.as_bytes()).unwrap();
-        let dta = buf.get_data();
-        let stride = buf.get_stride(0);
-        let depths: [u8; 3] = [
-                buf.get_info().get_format().get_chromaton(0).unwrap().get_depth(),
-                buf.get_info().get_format().get_chromaton(1).unwrap().get_depth(),
-                buf.get_info().get_format().get_chromaton(2).unwrap().get_depth()
-            ];
-        let masks: [u16; 3] = [
-                (1 << depths[0]) - 1,
-                (1 << depths[1]) - 1,
-                (1 << depths[2]) - 1
-            ];
-        let shifts: [u8; 3] = [
-                buf.get_info().get_format().get_chromaton(0).unwrap().get_shift(),
-                buf.get_info().get_format().get_chromaton(1).unwrap().get_shift(),
-                buf.get_info().get_format().get_chromaton(2).unwrap().get_shift()
-            ];
-        let mut line: Vec<u8> = Vec::with_capacity(w * 3);
-        line.resize(w * 3, 0);
-        for src in dta.chunks(stride) {
-            for x in 0..w {
-                let elem = src[x];
-                let r = ((elem >> shifts[0]) & masks[0]) << (8 - depths[0]);
-                let g = ((elem >> shifts[1]) & masks[1]) << (8 - depths[1]);
-                let b = ((elem >> shifts[2]) & masks[2]) << (8 - depths[2]);
-                line[x * 3 + 0] = r as u8;
-                line[x * 3 + 1] = g as u8;
-                line[x * 3 + 2] = b as u8;
-            }
-            ofile.write_all(line.as_slice()).unwrap();
-        }
-    } else {
-panic!(" unhandled buf format");
-    }
 }
 
 /*fn open_wav_out(pfx: &str, strno: usize) -> WavWriter {
@@ -203,7 +161,7 @@ pub fn test_file_decoding(demuxer: &str, name: &str, limit: Option<u64>,
     let mut br = ByteReader::new(&mut fr);
     let mut dmx = create_demuxer(dmx_f, &mut br).unwrap();
 
-    let mut decs: Vec<Option<Box<dyn NADecoder>>> = Vec::new();
+    let mut decs: Vec<Option<(Box<NADecoderSupport>, Box<dyn NADecoder>)>> = Vec::new();
     for i in 0..dmx.get_num_streams() {
         let s = dmx.get_stream(i).unwrap();
         let info = s.get_info();
@@ -211,8 +169,9 @@ pub fn test_file_decoding(demuxer: &str, name: &str, limit: Option<u64>,
         if let Some(df) = decfunc {
             if (decode_video && info.is_video()) || (decode_audio && info.is_audio()) {
                 let mut dec = (df)();
-                dec.init(info).unwrap();
-                decs.push(Some(dec));
+                let mut dsupp = Box::new(NADecoderSupport::new());
+                dec.init(&mut dsupp, info).unwrap();
+                decs.push(Some((dsupp, dec)));
             } else {
                 decs.push(None);
             }
@@ -232,8 +191,8 @@
             if pkt.get_pts().unwrap() > limit.unwrap() { break; }
         }
         let streamno = pkt.get_stream().get_id() as usize;
-        if let Some(ref mut dec) = decs[streamno] {
-            let frm = dec.decode(&pkt).unwrap();
+        if let Some((ref mut dsupp, ref mut dec)) = decs[streamno] {
+            let frm = dec.decode(dsupp, &pkt).unwrap();
             if pkt.get_stream().get_info().is_video() && video_pfx.is_some() && frm.get_frame_type() != FrameType::Skip {
                 let pfx = video_pfx.unwrap();
                 let pts = if let Some(fpts) = frm.get_pts() { fpts } else { pkt.get_pts().unwrap() };
@@ -260,7 +219,7 @@ pub fn test_decode_audio(demuxer: &str, name: &str, limit: Option<u64>, audio_pf
     let mut br = ByteReader::new(&mut fr);
     let mut dmx = create_demuxer(dmx_f, &mut br).unwrap();
 
-    let mut decs: Vec<Option<Box<dyn NADecoder>>> = Vec::new();
+    let mut decs: Vec<Option<(Box<NADecoderSupport>, Box<dyn NADecoder>)>> = Vec::new();
     for i in 0..dmx.get_num_streams() {
         let s = dmx.get_stream(i).unwrap();
         let info = s.get_info();
@@ -268,8 +227,9 @@ pub fn test_decode_audio(demuxer: &str, name: &str, limit: Option<u64>, audio_pf
        if let Some(df) = decfunc {
            if info.is_audio() {
                let mut dec = (df)();
-                dec.init(info).unwrap();
-                decs.push(Some(dec));
+                let mut dsupp = Box::new(NADecoderSupport::new());
+                dec.init(&mut dsupp, info).unwrap();
+                decs.push(Some((dsupp, dec)));
            } else {
                decs.push(None);
            }
@@ -296,8 +256,8 @@
             if pkt.get_pts().unwrap() > limit.unwrap() { break; }
         }
         let streamno = pkt.get_stream().get_id() as usize;
-        if let Some(ref mut dec) = decs[streamno] {
-            let frm = dec.decode(&pkt).unwrap();
+        if let Some((ref mut dsupp, ref mut dec)) = decs[streamno] {
+            let frm = dec.decode(dsupp, &pkt).unwrap();
             if frm.get_info().is_audio() {
                 if !wrote_header {
                     wwr.write_header(frm.get_info().as_ref().get_properties().get_audio_info().unwrap()).unwrap();