X-Git-Url: https://git.nihav.org/?a=blobdiff_plain;f=nihav-realmedia%2Fsrc%2Fcodecs%2Frv3040.rs;h=1e83d31e5227fa177af2297352b929772b8f96f3;hb=61d3e29467a4a634bbca56b8acfadcc346122a50;hp=2d76eaa3e6d2b8ff8f11ea9b129bcd4a66add547;hpb=5641dccfbf2a70d589cf094a0d4ed5a10f919f00;p=nihav.git

diff --git a/nihav-realmedia/src/codecs/rv3040.rs b/nihav-realmedia/src/codecs/rv3040.rs
index 2d76eaa..1e83d31 100644
--- a/nihav-realmedia/src/codecs/rv3040.rs
+++ b/nihav-realmedia/src/codecs/rv3040.rs
@@ -1,6 +1,6 @@
 use nihav_core::formats::YUV420_FORMAT;
-use nihav_core::frame::{NABufferType, NAVideoInfo, NAVideoBuffer, FrameType, alloc_video_buffer};
-use nihav_core::codecs::{MV, ZERO_MV, DecoderError, DecoderResult, IPBShuffler};
+use nihav_core::frame::{NABufferType, NAVideoInfo, NAVideoBuffer, NAVideoBufferRef, FrameType, alloc_video_buffer};
+use nihav_core::codecs::{NADecoderSupport, MV, ZERO_MV, DecoderError, DecoderResult, IPBShuffler};
 use nihav_core::io::bitreader::{BitReader,BitReaderMode};
 use nihav_core::io::intcode::*;
 use std::mem;
@@ -716,13 +716,13 @@ fn decode_mv(br: &mut BitReader) -> DecoderResult<MV> {
     Ok(MV{ x: x, y: y })
 }
 
-fn do_mc_16x16(dsp: &Box<RV34DSP>, buf: &mut NAVideoBuffer<u8>, prevbuf: &NAVideoBuffer<u8>, mb_x: usize, mb_y: usize, mv: MV, avg: bool) {
+fn do_mc_16x16(dsp: &Box<dyn RV34DSP>, buf: &mut NAVideoBuffer<u8>, prevbuf: &NAVideoBuffer<u8>, mb_x: usize, mb_y: usize, mv: MV, avg: bool) {
     dsp.do_luma_mc  (buf, prevbuf, mb_x * 16, mb_y * 16, mv, true, avg);
     dsp.do_chroma_mc(buf, prevbuf, mb_x * 8, mb_y * 8, 1, mv, true, avg);
     dsp.do_chroma_mc(buf, prevbuf, mb_x * 8, mb_y * 8, 2, mv, true, avg);
 }
 
-fn do_mc_8x8(dsp: &Box<RV34DSP>, buf: &mut NAVideoBuffer<u8>, prevbuf: &NAVideoBuffer<u8>, mb_x: usize, xoff: usize, mb_y: usize, yoff: usize, mv: MV, avg: bool) {
+fn do_mc_8x8(dsp: &Box<dyn RV34DSP>, buf: &mut NAVideoBuffer<u8>, prevbuf: &NAVideoBuffer<u8>, mb_x: usize, xoff: usize, mb_y: usize, yoff: usize, mv: MV, avg: bool) {
     dsp.do_luma_mc  (buf, prevbuf, mb_x * 16 + xoff * 8, mb_y * 16 + yoff * 8, mv, false, avg);
     dsp.do_chroma_mc(buf, prevbuf, mb_x * 8 + xoff * 4, mb_y * 8 + yoff * 4, 1, mv, false, avg);
     dsp.do_chroma_mc(buf, prevbuf, mb_x * 8 + xoff * 4, mb_y * 8 + yoff * 4, 2, mv, false, avg);
@@ -735,7 +735,7 @@ fn do_avg(cdsp: &RV34CommonDSP, buf: &mut NAVideoBuffer, avg_buf: &NAVideoBu
         let csize = if comp == 0 { size } else { size >> 1 };
         let dstride = buf.get_stride(comp);
         let doffset = buf.get_offset(comp) + xoff + yoff * dstride;
-        let mut data = buf.get_data_mut();
+        let data = buf.get_data_mut().unwrap();
         let dst: &mut [u8] = data.as_mut_slice();
 
         let sstride = avg_buf.get_stride(comp);
@@ -754,7 +754,7 @@ pub struct RV34Decoder {
     is_rv30: bool,
     coderead: RV34Codes,
-    dsp: Box<RV34DSP>,
+    dsp: Box<dyn RV34DSP>,
     cdsp: RV34CommonDSP,
     width: usize,
     height: usize,
@@ -767,14 +767,14 @@ pub struct RV34Decoder {
     ratio2: u32,
     is_b: bool,
     mbinfo: Vec<RV34MBInfo>,
-    avg_buf: NAVideoBuffer<u8>,
+    avg_buf: NAVideoBufferRef<u8>,
     base_ts: u64,
 }
 
 impl RV34Decoder {
-    pub fn new(is_rv30: bool, dsp: Box<RV34DSP>) -> Self {
+    pub fn new(is_rv30: bool, dsp: Box<dyn RV34DSP>) -> Self {
         let tmp_vinfo = NAVideoInfo::new(16, 16, false, YUV420_FORMAT);
-        let mut vt = alloc_video_buffer(tmp_vinfo, 4).unwrap();
+        let vt = alloc_video_buffer(tmp_vinfo, 4).unwrap();
         let vb = vt.get_vbuf();
         let avg_buf = vb.unwrap();
         RV34Decoder {
@@ -839,7 +839,7 @@ impl RV34Decoder {
         }
         let stride = buf.get_stride(0);
         let mut offset = buf.get_offset(0) + sstate.mb_x * 16 + sstate.mb_y * 16 * stride;
-        let mut data = buf.get_data_mut();
+        let data = buf.get_data_mut().unwrap();
         let framebuf: &mut [u8] = data.as_mut_slice();
 
         if is_16 {
@@ -890,7 +890,7 @@ impl RV34Decoder {
         for comp in 1..3 {
             let stride = buf.get_stride(comp);
             let mut offset = buf.get_offset(comp) + sstate.mb_x * 8 + sstate.mb_y * 8 * stride;
-            let mut data = buf.get_data_mut();
+            let data = buf.get_data_mut().unwrap();
             let framebuf: &mut [u8] = data.as_mut_slice();
             if is_16 {
                 let im8 = imode.get_pred8_type(sstate.has_top, sstate.has_left);
@@ -1015,7 +1015,7 @@ impl RV34Decoder {
         }
         let stride = buf.get_stride(0);
         let mut offset = buf.get_offset(0) + sstate.mb_x * 16 + sstate.mb_y * 16 * stride;
-        let mut data = buf.get_data_mut();
+        let data = buf.get_data_mut().unwrap();
         let framebuf: &mut [u8] = data.as_mut_slice();
 
         for y in 0..4 {
@@ -1050,7 +1050,7 @@ impl RV34Decoder {
         for comp in 1..3 {
             let stride = buf.get_stride(comp);
             let mut offset = buf.get_offset(comp) + sstate.mb_x * 8 + sstate.mb_y * 8 * stride;
-            let mut data = buf.get_data_mut();
+            let data = buf.get_data_mut().unwrap();
             let framebuf: &mut [u8] = data.as_mut_slice();
             for _ in 0..2 {
                 for x in 0..2 {
@@ -1112,7 +1112,7 @@ impl RV34Decoder {
         }
     }
 
-    pub fn parse_frame(&mut self, src: &[u8], bd: &mut RV34BitstreamDecoder) -> DecoderResult<(NABufferType, FrameType, u64)> {
+    pub fn parse_frame(&mut self, supp: &mut NADecoderSupport, src: &[u8], bd: &mut RV34BitstreamDecoder) -> DecoderResult<(NABufferType, FrameType, u64)> {
         let mut slice_offs: Vec<usize> = Vec::new();
         parse_slice_offsets(src, &mut slice_offs)?;
         let ini_off = slice_offs.len() * 8 + 1;
@@ -1157,10 +1157,21 @@ impl RV34Decoder {
 
         //todo validate against ref frame
         let vinfo = NAVideoInfo::new(hdr0.width, hdr0.height, false, YUV420_FORMAT);
-        let bufret = alloc_video_buffer(vinfo, 4);
-        if let Err(_) = bufret { return Err(DecoderError::InvalidData); }
-        let mut bufinfo = bufret.unwrap();
-        let mut buf = bufinfo.get_vbuf().unwrap();
+        let ret = supp.pool_u8.get_free();
+        if ret.is_none() {
+            return Err(DecoderError::AllocError);
+        }
+        let mut buf = ret.unwrap();
+        if buf.get_info() != vinfo {
+            self.ipbs.clear();
+            supp.pool_u8.reset();
+            supp.pool_u8.prealloc_video(vinfo, 4)?;
+            let ret = supp.pool_u8.get_free();
+            if ret.is_none() {
+                return Err(DecoderError::AllocError);
+            }
+            buf = ret.unwrap();
+        }
 
         sstate.q = q;
         sstate.has_top = false;
@@ -1269,11 +1280,11 @@ impl RV34Decoder {
             self.dsp.loop_filter(&mut buf, hdr0.ftype, &mbinfo, mb_w, mb_h - 1);
         }
         if !self.is_b {
-            self.ipbs.add_frame(buf);
+            self.ipbs.add_frame(buf.clone());
             mem::swap(&mut self.mvi, &mut self.ref_mvi);
             mem::swap(&mut self.mbinfo, &mut mbinfo);
         }
 
-        Ok((bufinfo, hdr0.ftype, ts))
+        Ok((NABufferType::Video(buf), hdr0.ftype, ts))
     }
 }