X-Git-Url: https://git.nihav.org/?p=nihav.git;a=blobdiff_plain;f=nihav-core%2Fsrc%2Fframe.rs;h=f596dc95ad2b1d02b684c6563be6bd0492a4e5ec;hp=6dfc5eb98d50a665963d388e75d99ae2c6163be4;hb=b191eef3e3e1b6bea510c7e64606d8442f974f8b;hpb=dc45d8ce2269e638c10a18f99aed3294fd5de827 diff --git a/nihav-core/src/frame.rs b/nihav-core/src/frame.rs index 6dfc5eb..f596dc9 100644 --- a/nihav-core/src/frame.rs +++ b/nihav-core/src/frame.rs @@ -1,26 +1,38 @@ +//! Packets and decoded frames functionality. use std::cmp::max; -use std::collections::HashMap; +//use std::collections::HashMap; use std::fmt; -use std::sync::Arc; +pub use std::sync::Arc; pub use crate::formats::*; pub use crate::refs::*; +use std::str::FromStr; +/// Audio stream information. #[allow(dead_code)] #[derive(Clone,Copy,PartialEq)] pub struct NAAudioInfo { - sample_rate: u32, - channels: u8, - format: NASoniton, - block_len: usize, + /// Sample rate. + pub sample_rate: u32, + /// Number of channels. + pub channels: u8, + /// Audio sample format. + pub format: NASoniton, + /// Length of one audio block in samples. + pub block_len: usize, } impl NAAudioInfo { + /// Constructs a new `NAAudioInfo` instance. pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self { NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl } } + /// Returns audio sample rate. pub fn get_sample_rate(&self) -> u32 { self.sample_rate } + /// Returns the number of channels. pub fn get_channels(&self) -> u8 { self.channels } + /// Returns sample format. pub fn get_format(&self) -> NASoniton { self.format } + /// Returns one audio block duration in samples. pub fn get_block_len(&self) -> usize { self.block_len } } @@ -30,24 +42,39 @@ impl fmt::Display for NAAudioInfo { } } +/// Video stream information. #[allow(dead_code)] #[derive(Clone,Copy,PartialEq)] pub struct NAVideoInfo { - width: usize, - height: usize, - flipped: bool, - format: NAPixelFormaton, + /// Picture width. + pub width: usize, + /// Picture height. + pub height: usize, + /// Picture is stored downside up. + pub flipped: bool, + /// Picture pixel format. + pub format: NAPixelFormaton, + /// Declared bits per sample. + pub bits: u8, } impl NAVideoInfo { + /// Constructs a new `NAVideoInfo` instance. pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self { - NAVideoInfo { width: w, height: h, flipped: flip, format: fmt } + let bits = fmt.get_total_depth(); + NAVideoInfo { width: w, height: h, flipped: flip, format: fmt, bits } } + /// Returns picture width. pub fn get_width(&self) -> usize { self.width as usize } + /// Returns picture height. pub fn get_height(&self) -> usize { self.height as usize } + /// Returns picture orientation. pub fn is_flipped(&self) -> bool { self.flipped } + /// Returns picture pixel format. pub fn get_format(&self) -> NAPixelFormaton { self.format } + /// Sets new picture width. pub fn set_width(&mut self, w: usize) { self.width = w; } + /// Sets new picture height. pub fn set_height(&mut self, h: usize) { self.height = h; } } @@ -57,32 +84,40 @@ impl fmt::Display for NAVideoInfo { } } +/// A list of possible stream information types. #[derive(Clone,Copy,PartialEq)] pub enum NACodecTypeInfo { + /// No codec present. None, + /// Audio codec information. Audio(NAAudioInfo), + /// Video codec information. Video(NAVideoInfo), } impl NACodecTypeInfo { + /// Returns video stream information. pub fn get_video_info(&self) -> Option { match *self { NACodecTypeInfo::Video(vinfo) => Some(vinfo), _ => None, } } + /// Returns audio stream information. 
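Usage sketch (illustrative, not from the patch) for the stream-information types introduced above; it assumes `nihav_core::frame::*` and `nihav_core::formats::*` are in scope and that the crate's `YUV420_FORMAT`/`SND_S16P_FORMAT` constants are available.

```rust
use nihav_core::formats::*;
use nihav_core::frame::*;

fn describe_streams() {
    // Video stream: 640x480, stored top-down, planar YUV 4:2:0.
    let vinfo = NAVideoInfo::new(640, 480, false, YUV420_FORMAT);
    let vtype = NACodecTypeInfo::Video(vinfo);

    // Audio stream: 44.1 kHz stereo, 16-bit planar, 1024-sample blocks.
    let ainfo = NAAudioInfo::new(44100, 2, SND_S16P_FORMAT, 1024);
    let atype = NACodecTypeInfo::Audio(ainfo);

    // The enum lets generic code query the stream kind and its details.
    assert!(vtype.is_video() && atype.is_audio());
    if let Some(vi) = vtype.get_video_info() {
        println!("video {}x{}, {} bpp", vi.get_width(), vi.get_height(), vi.bits);
    }
}
```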
pub fn get_audio_info(&self) -> Option { match *self { NACodecTypeInfo::Audio(ainfo) => Some(ainfo), _ => None, } } + /// Reports whether the current stream is video stream. pub fn is_video(&self) -> bool { match *self { NACodecTypeInfo::Video(_) => true, _ => false, } } + /// Reports whether the current stream is audio stream. pub fn is_audio(&self) -> bool { match *self { NACodecTypeInfo::Audio(_) => true, @@ -102,6 +137,10 @@ impl fmt::Display for NACodecTypeInfo { } } +/// Decoded video frame. +/// +/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer. +/// In case of image with several components those components are stored sequentially and can be accessed in the buffer starting at corresponding component offset. #[derive(Clone)] pub struct NAVideoBuffer { info: NAVideoInfo, @@ -111,13 +150,24 @@ pub struct NAVideoBuffer { } impl NAVideoBuffer { + /// Constructs video buffer from the provided components. + pub fn from_raw_parts(info: NAVideoInfo, data: NABufferRef>, offs: Vec, strides: Vec) -> Self { + Self { info, data, offs, strides } + } + /// Returns the component offset (0 for all unavailable offsets). pub fn get_offset(&self, idx: usize) -> usize { if idx >= self.offs.len() { 0 } else { self.offs[idx] } } + /// Returns picture info. pub fn get_info(&self) -> NAVideoInfo { self.info } + /// Returns an immutable reference to the data. pub fn get_data(&self) -> &Vec { self.data.as_ref() } + /// Returns a mutable reference to the data. pub fn get_data_mut(&mut self) -> Option<&mut Vec> { self.data.as_mut() } + /// Returns the number of components in picture format. + pub fn get_num_components(&self) -> usize { self.offs.len() } + /// Creates a copy of current `NAVideoBuffer`. pub fn copy_buffer(&mut self) -> Self { let mut data: Vec = Vec::with_capacity(self.data.len()); data.clone_from(self.data.as_ref()); @@ -127,71 +177,140 @@ impl NAVideoBuffer { strides.clone_from(&self.strides); NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides } } + /// Returns stride (distance between subsequent lines) for the requested component. pub fn get_stride(&self, idx: usize) -> usize { if idx >= self.strides.len() { return 0; } self.strides[idx] } + /// Returns requested component dimensions. pub fn get_dimensions(&self, idx: usize) -> (usize, usize) { get_plane_size(&self.info, idx) } + /// Converts current instance into buffer reference. pub fn into_ref(self) -> NABufferRef { NABufferRef::new(self) } + + fn print_contents(&self, datatype: &str) { + println!("{} video buffer size {}", datatype, self.data.len()); + println!(" format {}", self.info); + print!(" offsets:"); + for off in self.offs.iter() { + print!(" {}", *off); + } + println!(); + print!(" strides:"); + for stride in self.strides.iter() { + print!(" {}", *stride); + } + println!(); + } } +/// A specialised type for reference-counted `NAVideoBuffer`. pub type NAVideoBufferRef = NABufferRef>; +/// Decoded audio frame. +/// +/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer. +/// In case of planar audio samples for each channel are stored sequentially and can be accessed in the buffer starting at corresponding channel offset. #[derive(Clone)] pub struct NAAudioBuffer { info: NAAudioInfo, data: NABufferRef>, offs: Vec, + stride: usize, + step: usize, chmap: NAChannelMap, len: usize, } impl NAAudioBuffer { + /// Returns the start position of requested channel data. 
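Usage sketch for the plane accessors above (an illustrative helper, assuming an already decoded 8-bit buffer):

```rust
// Average value of one plane of an 8-bit video buffer, walked via the
// offset/stride/dimensions accessors shown in this hunk.
fn plane_average(vbuf: &NAVideoBuffer<u8>, plane: usize) -> u8 {
    let (w, h) = vbuf.get_dimensions(plane);
    let off    = vbuf.get_offset(plane);   // start of the plane in the flat buffer
    let stride = vbuf.get_stride(plane);   // distance between consecutive lines
    let data   = vbuf.get_data();

    let mut sum = 0u64;
    for line in data[off..].chunks(stride).take(h) {
        sum += line[..w].iter().map(|&px| u64::from(px)).sum::<u64>();
    }
    if w * h > 0 { (sum / ((w * h) as u64)) as u8 } else { 0 }
}
```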
pub fn get_offset(&self, idx: usize) -> usize { if idx >= self.offs.len() { 0 } else { self.offs[idx] } } + /// Returns the distance between the start of one channel and the next one. + pub fn get_stride(&self) -> usize { self.stride } + /// Returns the distance between the samples in one channel. + pub fn get_step(&self) -> usize { self.step } + /// Returns audio format information. pub fn get_info(&self) -> NAAudioInfo { self.info } - pub fn get_chmap(&self) -> NAChannelMap { self.chmap.clone() } + /// Returns channel map. + pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap } + /// Returns an immutable reference to the data. pub fn get_data(&self) -> &Vec { self.data.as_ref() } + /// Returns reference to the data. + pub fn get_data_ref(&self) -> NABufferRef> { self.data.clone() } + /// Returns a mutable reference to the data. pub fn get_data_mut(&mut self) -> Option<&mut Vec> { self.data.as_mut() } + /// Clones current `NAAudioBuffer` into a new one. pub fn copy_buffer(&mut self) -> Self { let mut data: Vec = Vec::with_capacity(self.data.len()); data.clone_from(self.data.as_ref()); let mut offs: Vec = Vec::with_capacity(self.offs.len()); offs.clone_from(&self.offs); - NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap(), len: self.len } + NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride, step: self.step } } + /// Return the length of frame in samples. pub fn get_length(&self) -> usize { self.len } + /// Truncates buffer length if possible. + /// + /// In case when new length is larger than old length nothing is done. + pub fn truncate(&mut self, new_len: usize) { + self.len = self.len.min(new_len); + } + + fn print_contents(&self, datatype: &str) { + println!("Audio buffer with {} data, stride {}, step {}", datatype, self.stride, self.step); + println!(" format {}", self.info); + println!(" channel map {}", self.chmap); + print!(" offsets:"); + for off in self.offs.iter() { + print!(" {}", *off); + } + println!(); + } } impl NAAudioBuffer { + /// Constructs a new `NAAudioBuffer` instance. pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef>, chmap: NAChannelMap) -> Self { let len = data.len(); - NAAudioBuffer { info, data, chmap, offs: Vec::new(), len } + NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0, step: 0 } } } +/// A list of possible decoded frame types. #[derive(Clone)] pub enum NABufferType { + /// 8-bit video buffer. Video (NAVideoBufferRef), + /// 16-bit video buffer (i.e. every component or packed pixel fits into 16 bits). Video16 (NAVideoBufferRef), + /// 32-bit video buffer (i.e. every component or packed pixel fits into 32 bits). Video32 (NAVideoBufferRef), + /// Packed video buffer. VideoPacked(NAVideoBufferRef), + /// Audio buffer with 8-bit unsigned integer audio. AudioU8 (NAAudioBuffer), + /// Audio buffer with 16-bit signed integer audio. AudioI16 (NAAudioBuffer), + /// Audio buffer with 32-bit signed integer audio. AudioI32 (NAAudioBuffer), + /// Audio buffer with 32-bit floating point audio. AudioF32 (NAAudioBuffer), + /// Packed audio buffer. AudioPacked(NAAudioBuffer), + /// Buffer with generic data (e.g. subtitles). Data (NABufferRef>), + /// No data present. None, } impl NABufferType { + /// Returns the offset to the requested component or channel. 
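Usage sketch for the new `get_stride()`/`get_step()` pair (an illustrative helper; the same loop covers planar and interleaved layouts because only those two values differ):

```rust
// Scale one channel of a float audio buffer in place.
// Planar data:      stride = nsamples, step = 1.
// Interleaved data: stride = 1,        step = number of channels.
fn scale_channel(abuf: &mut NAAudioBuffer<f32>, channel: usize, gain: f32) {
    let off  = abuf.get_offset(channel);
    let step = abuf.get_step();
    let len  = abuf.get_length();
    // get_data_mut() yields Some only while this is the sole data reference.
    if let Some(data) = abuf.get_data_mut() {
        for i in 0..len {
            data[off + i * step] *= gain;
        }
    }
}
```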
pub fn get_offset(&self, idx: usize) -> usize { match *self { NABufferType::Video(ref vb) => vb.get_offset(idx), @@ -200,11 +319,13 @@ impl NABufferType { NABufferType::VideoPacked(ref vb) => vb.get_offset(idx), NABufferType::AudioU8(ref ab) => ab.get_offset(idx), NABufferType::AudioI16(ref ab) => ab.get_offset(idx), + NABufferType::AudioI32(ref ab) => ab.get_offset(idx), NABufferType::AudioF32(ref ab) => ab.get_offset(idx), NABufferType::AudioPacked(ref ab) => ab.get_offset(idx), _ => 0, } } + /// Returns information for video frames. pub fn get_video_info(&self) -> Option { match *self { NABufferType::Video(ref vb) => Some(vb.get_info()), @@ -214,6 +335,7 @@ impl NABufferType { _ => None, } } + /// Returns reference to 8-bit (or packed) video buffer. pub fn get_vbuf(&self) -> Option> { match *self { NABufferType::Video(ref vb) => Some(vb.clone()), @@ -221,18 +343,87 @@ impl NABufferType { _ => None, } } + /// Returns reference to 16-bit video buffer. pub fn get_vbuf16(&self) -> Option> { match *self { NABufferType::Video16(ref vb) => Some(vb.clone()), _ => None, } } + /// Returns reference to 32-bit video buffer. pub fn get_vbuf32(&self) -> Option> { match *self { NABufferType::Video32(ref vb) => Some(vb.clone()), _ => None, } } + /// Returns information for audio frames. + pub fn get_audio_info(&self) -> Option { + match *self { + NABufferType::AudioU8(ref ab) => Some(ab.get_info()), + NABufferType::AudioI16(ref ab) => Some(ab.get_info()), + NABufferType::AudioI32(ref ab) => Some(ab.get_info()), + NABufferType::AudioF32(ref ab) => Some(ab.get_info()), + NABufferType::AudioPacked(ref ab) => Some(ab.get_info()), + _ => None, + } + } + /// Returns audio channel map. + pub fn get_chmap(&self) -> Option<&NAChannelMap> { + match *self { + NABufferType::AudioU8(ref ab) => Some(ab.get_chmap()), + NABufferType::AudioI16(ref ab) => Some(ab.get_chmap()), + NABufferType::AudioI32(ref ab) => Some(ab.get_chmap()), + NABufferType::AudioF32(ref ab) => Some(ab.get_chmap()), + NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()), + _ => None, + } + } + /// Returns audio frame duration in samples. + pub fn get_audio_length(&self) -> usize { + match *self { + NABufferType::AudioU8(ref ab) => ab.get_length(), + NABufferType::AudioI16(ref ab) => ab.get_length(), + NABufferType::AudioI32(ref ab) => ab.get_length(), + NABufferType::AudioF32(ref ab) => ab.get_length(), + NABufferType::AudioPacked(ref ab) => ab.get_length(), + _ => 0, + } + } + /// Truncates audio frame duration if possible. + pub fn truncate_audio(&mut self, len: usize) { + match *self { + NABufferType::AudioU8(ref mut ab) => ab.truncate(len), + NABufferType::AudioI16(ref mut ab) => ab.truncate(len), + NABufferType::AudioI32(ref mut ab) => ab.truncate(len), + NABufferType::AudioF32(ref mut ab) => ab.truncate(len), + NABufferType::AudioPacked(ref mut ab) => ab.truncate(len), + _ => {}, + }; + } + /// Returns the distance between starts of two channels. + pub fn get_audio_stride(&self) -> usize { + match *self { + NABufferType::AudioU8(ref ab) => ab.get_stride(), + NABufferType::AudioI16(ref ab) => ab.get_stride(), + NABufferType::AudioI32(ref ab) => ab.get_stride(), + NABufferType::AudioF32(ref ab) => ab.get_stride(), + NABufferType::AudioPacked(ref ab) => ab.get_stride(), + _ => 0, + } + } + /// Returns the distance between two samples in one channel. 
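Sketch of format-agnostic handling through `NABufferType` (a hypothetical decoder helper): trimming an over-allocated audio frame to the number of samples actually produced.

```rust
fn finish_audio_frame(mut buf: NABufferType, valid_samples: usize) -> NABufferType {
    if let Some(ainfo) = buf.get_audio_info() {
        println!("decoded {} Hz, {} ch, {} samples",
                 ainfo.get_sample_rate(), ainfo.get_channels(), buf.get_audio_length());
    }
    // truncate_audio() is a no-op when valid_samples >= the current length.
    buf.truncate_audio(valid_samples);
    buf
}
```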
+ pub fn get_audio_step(&self) -> usize { + match *self { + NABufferType::AudioU8(ref ab) => ab.get_step(), + NABufferType::AudioI16(ref ab) => ab.get_step(), + NABufferType::AudioI32(ref ab) => ab.get_step(), + NABufferType::AudioF32(ref ab) => ab.get_step(), + NABufferType::AudioPacked(ref ab) => ab.get_step(), + _ => 0, + } + } + /// Returns reference to 8-bit (or packed) audio buffer. pub fn get_abuf_u8(&self) -> Option> { match *self { NABufferType::AudioU8(ref ab) => Some(ab.clone()), @@ -240,38 +431,66 @@ impl NABufferType { _ => None, } } + /// Returns reference to 16-bit audio buffer. pub fn get_abuf_i16(&self) -> Option> { match *self { NABufferType::AudioI16(ref ab) => Some(ab.clone()), _ => None, } } + /// Returns reference to 32-bit integer audio buffer. pub fn get_abuf_i32(&self) -> Option> { match *self { NABufferType::AudioI32(ref ab) => Some(ab.clone()), _ => None, } } + /// Returns reference to 32-bit floating point audio buffer. pub fn get_abuf_f32(&self) -> Option> { match *self { NABufferType::AudioF32(ref ab) => Some(ab.clone()), _ => None, } } + /// Prints internal buffer layout. + pub fn print_buffer_metadata(&self) { + match *self { + NABufferType::Video(ref buf) => buf.print_contents("8-bit"), + NABufferType::Video16(ref buf) => buf.print_contents("16-bit"), + NABufferType::Video32(ref buf) => buf.print_contents("32-bit"), + NABufferType::VideoPacked(ref buf) => buf.print_contents("packed"), + NABufferType::AudioU8(ref buf) => buf.print_contents("8-bit unsigned integer"), + NABufferType::AudioI16(ref buf) => buf.print_contents("16-bit integer"), + NABufferType::AudioI32(ref buf) => buf.print_contents("32-bit integer"), + NABufferType::AudioF32(ref buf) => buf.print_contents("32-bit float"), + NABufferType::AudioPacked(ref buf) => buf.print_contents("packed"), + NABufferType::Data(ref buf) => { println!("Data buffer, len = {}", buf.len()); }, + NABufferType::None => { println!("No buffer"); }, + }; + } } const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4; +/// Simplified decoded frame data. pub struct NASimpleVideoFrame<'a, T: Copy> { + /// Widths of each picture component. pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS], + /// Heights of each picture component. pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS], + /// Orientation (upside-down or downside-up) flag. pub flip: bool, + /// Strides for each component. pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS], + /// Start of each component. pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS], + /// Number of components. pub components: usize, + /// Pointer to the picture pixel data. pub data: &'a mut [T], } impl<'a, T:Copy> NASimpleVideoFrame<'a, T> { + /// Constructs a new instance of `NASimpleVideoFrame` from `NAVideoBuffer`. pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer) -> Option { let vinfo = vbuf.get_info(); let components = vinfo.format.components as usize; @@ -302,12 +521,18 @@ impl<'a, T:Copy> NASimpleVideoFrame<'a, T> { } } +/// A list of possible frame allocator errors. #[derive(Debug,Clone,Copy,PartialEq)] pub enum AllocatorError { + /// Requested picture dimensions are too large. TooLargeDimensions, + /// Invalid input format. FormatError, } +/// Constructs a new video buffer with requested format. +/// +/// `align` is power of two alignment for image. E.g. the value of 5 means that frame dimensions will be padded to be multiple of 32. 
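Sketch for `NASimpleVideoFrame` (illustrative; `from_video_buf` returns `None` when the format has more components than the simplified view supports):

```rust
// Fill every component of an 8-bit frame with a constant value.
fn clear_frame(vbuf: &mut NAVideoBuffer<u8>, value: u8) {
    if let Some(frm) = NASimpleVideoFrame::from_video_buf(vbuf) {
        for comp in 0..frm.components {
            let (w, h) = (frm.width[comp], frm.height[comp]);
            for line in 0..h {
                let start = frm.offset[comp] + line * frm.stride[comp];
                for el in frm.data[start..start + w].iter_mut() {
                    *el = value;
                }
            }
        }
    }
}
```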
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result { let fmt = &vinfo.format; let mut new_size: usize = 0; @@ -360,9 +585,7 @@ pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result Result Result Result { let mut offs: Vec = Vec::new(); - if ainfo.format.is_planar() || (ainfo.channels == 1 && (ainfo.format.get_bits() % 8) == 0) { + if ainfo.format.is_planar() || ((ainfo.format.get_bits() % 8) == 0) { let len = nsamples.checked_mul(ainfo.channels as usize); if len == None { return Err(AllocatorError::TooLargeDimensions); } let length = len.unwrap(); - for i in 0..ainfo.channels { - offs.push((i as usize) * nsamples); + let stride; + let step; + if ainfo.format.is_planar() { + stride = nsamples; + step = 1; + for i in 0..ainfo.channels { + offs.push((i as usize) * stride); + } + } else { + stride = 1; + step = ainfo.channels as usize; + for i in 0..ainfo.channels { + offs.push(i as usize); + } } if ainfo.format.is_float() { if ainfo.format.get_bits() == 32 { let data: Vec = vec![0.0; length]; - let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples }; + let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step }; Ok(NABufferType::AudioF32(buf)) } else { Err(AllocatorError::TooLargeDimensions) @@ -443,12 +676,16 @@ pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelM } else { if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() { let data: Vec = vec![0; length]; - let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples }; + let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step }; Ok(NABufferType::AudioU8(buf)) } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() { let data: Vec = vec![0; length]; - let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples }; + let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step }; Ok(NABufferType::AudioI16(buf)) + } else if ainfo.format.get_bits() == 32 && ainfo.format.is_signed() { + let data: Vec = vec![0; length]; + let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step }; + Ok(NABufferType::AudioI32(buf)) } else { Err(AllocatorError::TooLargeDimensions) } @@ -458,21 +695,27 @@ pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelM if len == None { return Err(AllocatorError::TooLargeDimensions); } let length = ainfo.format.get_audio_size(len.unwrap() as u64); let data: Vec = vec![0; length]; - let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples }; + let buf: NAAudioBuffer = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0, step: 0 }; Ok(NABufferType::AudioPacked(buf)) } } +/// Constructs a new buffer for generic data. pub fn alloc_data_buffer(size: usize) -> Result { let data: Vec = vec![0; size]; let buf: NABufferRef> = NABufferRef::new(data); Ok(NABufferType::Data(buf)) } -pub fn copy_buffer(buf: NABufferType) -> NABufferType { +/// Creates a clone of current buffer. +pub fn copy_buffer(buf: &NABufferType) -> NABufferType { buf.clone() } +/// Video frame pool. 
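Allocation sketch (assumptions: the crate's `YUV420_FORMAT`/`SND_F32P_FORMAT` constants and the `FromStr` implementation for `NAChannelMap`; sizes are illustrative):

```rust
use std::str::FromStr;

fn alloc_example_buffers() -> Result<(), AllocatorError> {
    // Video: 320x240 planar YUV 4:2:0, plane widths padded to 2^4 = 16 pixels.
    let vinfo = NAVideoInfo::new(320, 240, false, YUV420_FORMAT);
    let vbuf  = alloc_video_buffer(vinfo, 4)?;
    if let Some(vb) = vbuf.get_vbuf() {
        println!("luma stride {}", vb.get_stride(0));
    }

    // Audio: 1152 samples of planar 32-bit float stereo.
    let ainfo = NAAudioInfo::new(48000, 2, SND_F32P_FORMAT, 1152);
    let chmap = NAChannelMap::from_str("L,R").expect("valid channel map");
    let abuf  = alloc_audio_buffer(ainfo, 1152, chmap)?;
    println!("channel stride {} samples", abuf.get_audio_stride());
    Ok(())
}
```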
+/// +/// This structure allows codec to effectively reuse old frames instead of allocating and de-allocating frames every time. +/// Caller can also reserve some frames for its own purposes e.g. display queue. pub struct NAVideoBufferPool { pool: Vec>, max_len: usize, @@ -480,6 +723,7 @@ pub struct NAVideoBufferPool { } impl NAVideoBufferPool { + /// Constructs a new `NAVideoBufferPool` instance. pub fn new(max_len: usize) -> Self { Self { pool: Vec::with_capacity(max_len), @@ -487,9 +731,11 @@ impl NAVideoBufferPool { add_len: 0, } } + /// Sets the number of buffers reserved for the user. pub fn set_dec_bufs(&mut self, add_len: usize) { self.add_len = add_len; } + /// Returns an unused buffer from the pool. pub fn get_free(&mut self) -> Option> { for e in self.pool.iter() { if e.get_num_refs() == 1 { @@ -498,17 +744,22 @@ impl NAVideoBufferPool { } None } + /// Clones provided frame data into a free pool frame. pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef) -> Option> { let mut dbuf = self.get_free()?; dbuf.data.copy_from_slice(&rbuf.data); Some(dbuf) } + /// Clears the pool from all frames. pub fn reset(&mut self) { - self.pool.truncate(0); + self.pool.clear(); } } impl NAVideoBufferPool { + /// Allocates the target amount of video frames using [`alloc_video_buffer`]. + /// + /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> { let nbufs = self.max_len + self.add_len - self.pool.len(); for _ in 0..nbufs { @@ -526,6 +777,9 @@ impl NAVideoBufferPool { } impl NAVideoBufferPool { + /// Allocates the target amount of video frames using [`alloc_video_buffer`]. + /// + /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> { let nbufs = self.max_len + self.add_len - self.pool.len(); for _ in 0..nbufs { @@ -541,6 +795,9 @@ impl NAVideoBufferPool { } impl NAVideoBufferPool { + /// Allocates the target amount of video frames using [`alloc_video_buffer`]. + /// + /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> { let nbufs = self.max_len + self.add_len - self.pool.len(); for _ in 0..nbufs { @@ -555,6 +812,7 @@ impl NAVideoBufferPool { } } +/// Information about codec contained in a stream. #[allow(dead_code)] #[derive(Clone)] pub struct NACodecInfo { @@ -563,9 +821,11 @@ pub struct NACodecInfo { extradata: Option>>, } +/// A specialised type for reference-counted `NACodecInfo`. pub type NACodecInfoRef = Arc; impl NACodecInfo { + /// Constructs a new instance of `NACodecInfo`. pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option>) -> Self { let extradata = match edata { None => None, @@ -573,27 +833,36 @@ impl NACodecInfo { }; NACodecInfo { name, properties: p, extradata } } + /// Constructs a new reference-counted instance of `NACodecInfo`. pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option>>) -> Self { NACodecInfo { name, properties: p, extradata: edata } } + /// Converts current instance into a reference-counted one. pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) } + /// Returns codec information. pub fn get_properties(&self) -> NACodecTypeInfo { self.properties } + /// Returns additional initialisation data required by the codec. 
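Sketch of the intended pool usage in a decoder (illustrative: two reference frames plus two buffers reserved for the caller):

```rust
fn init_pool(vinfo: NAVideoInfo) -> Result<NAVideoBufferPool<u8>, AllocatorError> {
    let mut pool = NAVideoBufferPool::new(2);
    pool.set_dec_bufs(2);
    pool.prealloc_video(vinfo, 4)?;
    Ok(pool)
}

fn next_output_frame(pool: &mut NAVideoBufferPool<u8>) -> Option<NAVideoBufferRef<u8>> {
    // get_free() hands out a buffer only the pool itself still references;
    // None means the caller has not released enough frames yet.
    let buf = pool.get_free()?;
    // ... decode picture data into `buf` here ...
    Some(buf)
}
```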
pub fn get_extradata(&self) -> Option>> { if let Some(ref vec) = self.extradata { return Some(vec.clone()); } None } + /// Returns codec name. pub fn get_name(&self) -> &'static str { self.name } + /// Reports whether it is a video codec. pub fn is_video(&self) -> bool { if let NACodecTypeInfo::Video(_) = self.properties { return true; } false } + /// Reports whether it is an audio codec. pub fn is_audio(&self) -> bool { if let NACodecTypeInfo::Audio(_) = self.properties { return true; } false } + /// Constructs a new empty reference-counted instance of `NACodecInfo`. pub fn new_dummy() -> Arc { Arc::new(DUMMY_CODEC_INFO) } + /// Updates codec infomation. pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc { Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() }) } @@ -613,27 +882,27 @@ impl fmt::Display for NACodecInfo { } } +/// Default empty codec information. pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo { name: "none", properties: NACodecTypeInfo::None, extradata: None }; -#[derive(Debug,Clone)] -pub enum NAValue { - None, - Int(i32), - Long(i64), - String(String), - Data(Arc>), -} - +/// A list of recognized frame types. #[derive(Debug,Clone,Copy,PartialEq)] #[allow(dead_code)] pub enum FrameType { + /// Intra frame type. I, + /// Inter frame type. P, + /// Bidirectionally predicted frame. B, + /// Skip frame. + /// + /// When such frame is encountered then last frame should be used again if it is needed. Skip, + /// Some other frame type. Other, } @@ -649,38 +918,282 @@ impl fmt::Display for FrameType { } } +/// Timestamp information. #[derive(Debug,Clone,Copy)] pub struct NATimeInfo { - pts: Option, - dts: Option, - duration: Option, - tb_num: u32, - tb_den: u32, + /// Presentation timestamp. + pub pts: Option, + /// Decode timestamp. + pub dts: Option, + /// Duration (in timebase units). + pub duration: Option, + /// Timebase numerator. + pub tb_num: u32, + /// Timebase denominator. + pub tb_den: u32, } impl NATimeInfo { + /// Constructs a new `NATimeInfo` instance. pub fn new(pts: Option, dts: Option, duration: Option, tb_num: u32, tb_den: u32) -> Self { NATimeInfo { pts, dts, duration, tb_num, tb_den } } + /// Returns presentation timestamp. pub fn get_pts(&self) -> Option { self.pts } + /// Returns decoding timestamp. pub fn get_dts(&self) -> Option { self.dts } + /// Returns duration. pub fn get_duration(&self) -> Option { self.duration } + /// Sets new presentation timestamp. pub fn set_pts(&mut self, pts: Option) { self.pts = pts; } + /// Sets new decoding timestamp. pub fn set_dts(&mut self, dts: Option) { self.dts = dts; } + /// Sets new duration. pub fn set_duration(&mut self, dur: Option) { self.duration = dur; } + + /// Converts time in given scale into timestamp in given base. + #[allow(clippy::collapsible_if)] + pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 { + let tb_num = u64::from(tb_num); + let tb_den = u64::from(tb_den); + let tmp = time.checked_mul(tb_den); + if let Some(tmp) = tmp { + tmp / base / tb_num + } else { + if tb_num < base { + let coarse = time / tb_num; + if let Some(tmp) = coarse.checked_mul(tb_den) { + tmp / base + } else { + (coarse / base) * tb_den + } + } else { + let coarse = time / base; + if let Some(tmp) = coarse.checked_mul(tb_den) { + tmp / tb_num + } else { + (coarse / tb_num) * tb_den + } + } + } + } + /// Converts timestamp in given base into time in given scale. 
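Sketch for the timestamp helper above and the codec-information type from this hunk (codec name, timebase and the `YUV420_FORMAT` constant are assumptions/illustrative):

```rust
// 2500 ms expressed in a 1/90000 timebase; `base` is the scale of `time`
// (1000 for milliseconds), so this yields 225000.
fn make_packet_time() -> NATimeInfo {
    let pts = NATimeInfo::time_to_ts(2500, 1000, 1, 90000);
    NATimeInfo::new(Some(pts), None, None, 1, 90000)
}

// Reference-counted codec information as decoders receive it.
fn make_codec_info(edata: Option<Vec<u8>>) -> NACodecInfoRef {
    let vinfo = NAVideoInfo::new(640, 360, false, YUV420_FORMAT);
    NACodecInfo::new("realvideo3", NACodecTypeInfo::Video(vinfo), edata).into_ref()
}
```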
+ pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 { + let tb_num = u64::from(tb_num); + let tb_den = u64::from(tb_den); + let tmp = ts.checked_mul(base); + if let Some(tmp) = tmp { + let tmp2 = tmp.checked_mul(tb_num); + if let Some(tmp2) = tmp2 { + tmp2 / tb_den + } else { + (tmp / tb_den) * tb_num + } + } else { + let tmp = ts.checked_mul(tb_num); + if let Some(tmp) = tmp { + (tmp / tb_den) * base + } else { + (ts / tb_den) * base * tb_num + } + } + } + fn get_cur_ts(&self) -> u64 { self.pts.unwrap_or_else(|| self.dts.unwrap_or(0)) } + fn get_cur_millis(&self) -> u64 { + let ts = self.get_cur_ts(); + Self::ts_to_time(ts, 1000, self.tb_num, self.tb_den) + } + /// Checks whether the current time information is earler than provided reference time. + pub fn less_than(&self, time: NATimePoint) -> bool { + if self.pts.is_none() && self.dts.is_none() { + return true; + } + match time { + NATimePoint::PTS(rpts) => self.get_cur_ts() < rpts, + NATimePoint::Milliseconds(ms) => self.get_cur_millis() < ms, + NATimePoint::None => false, + } + } + /// Checks whether the current time information is the same as provided reference time. + pub fn equal(&self, time: NATimePoint) -> bool { + if self.pts.is_none() && self.dts.is_none() { + return time == NATimePoint::None; + } + match time { + NATimePoint::PTS(rpts) => self.get_cur_ts() == rpts, + NATimePoint::Milliseconds(ms) => self.get_cur_millis() == ms, + NATimePoint::None => false, + } + } } +/// Time information for specifying durations or seek positions. +#[derive(Clone,Copy,Debug,PartialEq)] +pub enum NATimePoint { + /// Time in milliseconds. + Milliseconds(u64), + /// Stream timestamp. + PTS(u64), + /// No time information present. + None, +} + +impl Default for NATimePoint { + fn default() -> Self { + NATimePoint::None + } +} + +impl fmt::Display for NATimePoint { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + NATimePoint::Milliseconds(millis) => { + let tot_s = millis / 1000; + let ms = millis % 1000; + if tot_s < 60 { + if ms != 0 { + return write!(f, "{}.{:03}", tot_s, ms); + } else { + return write!(f, "{}", tot_s); + } + } + let tot_m = tot_s / 60; + let s = tot_s % 60; + if tot_m < 60 { + if ms != 0 { + return write!(f, "{}:{:02}.{:03}", tot_m, s, ms); + } else { + return write!(f, "{}:{:02}", tot_m, s); + } + } + let h = tot_m / 60; + let m = tot_m % 60; + if ms != 0 { + write!(f, "{}:{:02}:{:02}.{:03}", h, m, s, ms) + } else { + write!(f, "{}:{:02}:{:02}", h, m, s) + } + }, + NATimePoint::PTS(pts) => { + write!(f, "{}pts", pts) + }, + NATimePoint::None => { + write!(f, "none") + }, + } + } +} + +impl FromStr for NATimePoint { + type Err = FormatParseError; + + /// Parses the string into time information. + /// + /// Accepted formats are `pts`, `ms` or `[hh:][mm:]ss[.ms]`. 
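Sketch of how a demuxer or player can compare packet times against a seek target using the helpers above (values illustrative):

```rust
fn is_before_seek_point(ts: &NATimeInfo, target: NATimePoint) -> bool {
    // Compares against either milliseconds or raw PTS, whichever was requested.
    ts.less_than(target)
}

fn seek_compare_example() {
    let ts = NATimeInfo::new(Some(90000), None, None, 1, 90000);     // one second in
    assert_eq!(NATimeInfo::ts_to_time(90000, 1000, 1, 90000), 1000); // -> 1000 ms
    assert!(is_before_seek_point(&ts, NATimePoint::Milliseconds(2000)));
    assert!(!is_before_seek_point(&ts, NATimePoint::PTS(90000)));
}
```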
+ fn from_str(s: &str) -> Result { + if s.is_empty() { + return Err(FormatParseError {}); + } + if !s.ends_with("pts") { + if s.ends_with("ms") { + let str_b = s.as_bytes(); + let num = std::str::from_utf8(&str_b[..str_b.len() - 2]).unwrap(); + let ret = num.parse::(); + if let Ok(val) = ret { + return Ok(NATimePoint::Milliseconds(val)); + } else { + return Err(FormatParseError {}); + } + } + let mut parts = s.split(':'); + let mut hrs = None; + let mut mins = None; + let mut secs = parts.next(); + if let Some(part) = parts.next() { + std::mem::swap(&mut mins, &mut secs); + secs = Some(part); + } + if let Some(part) = parts.next() { + std::mem::swap(&mut hrs, &mut mins); + std::mem::swap(&mut mins, &mut secs); + secs = Some(part); + } + if parts.next().is_some() { + return Err(FormatParseError {}); + } + let hours = if let Some(val) = hrs { + let ret = val.parse::(); + if ret.is_err() { return Err(FormatParseError {}); } + let val = ret.unwrap(); + if val > 1000 { return Err(FormatParseError {}); } + val + } else { 0 }; + let minutes = if let Some(val) = mins { + let ret = val.parse::(); + if ret.is_err() { return Err(FormatParseError {}); } + let val = ret.unwrap(); + if val >= 60 { return Err(FormatParseError {}); } + val + } else { 0 }; + let (seconds, millis) = if let Some(val) = secs { + let mut parts = val.split('.'); + let ret = parts.next().unwrap().parse::(); + if ret.is_err() { return Err(FormatParseError {}); } + let seconds = ret.unwrap(); + if mins.is_some() && seconds >= 60 { return Err(FormatParseError {}); } + let millis = if let Some(val) = parts.next() { + let mut mval = 0; + let mut base = 0; + for ch in val.chars() { + if ch >= '0' && ch <= '9' { + mval = mval * 10 + u64::from((ch as u8) - b'0'); + base += 1; + if base > 3 { break; } + } else { + return Err(FormatParseError {}); + } + } + while base < 3 { + mval *= 10; + base += 1; + } + mval + } else { 0 }; + (seconds, millis) + } else { unreachable!(); }; + let tot_secs = hours * 60 * 60 + minutes * 60 + seconds; + Ok(NATimePoint::Milliseconds(tot_secs * 1000 + millis)) + } else { + let str_b = s.as_bytes(); + let num = std::str::from_utf8(&str_b[..str_b.len() - 3]).unwrap(); + let ret = num.parse::(); + if let Ok(val) = ret { + Ok(NATimePoint::PTS(val)) + } else { + Err(FormatParseError {}) + } + } + } +} + +/// Decoded frame information. #[allow(dead_code)] #[derive(Clone)] pub struct NAFrame { - ts: NATimeInfo, - buffer: NABufferType, - info: NACodecInfoRef, - ftype: FrameType, - key: bool, - options: HashMap, + /// Frame timestamp. + pub ts: NATimeInfo, + /// Frame ID. + pub id: i64, + buffer: NABufferType, + info: NACodecInfoRef, + /// Frame type. + pub frame_type: FrameType, + /// Keyframe flag. + pub key: bool, +// options: HashMap, } +/// A specialised type for reference-counted `NAFrame`. pub type NAFrameRef = Arc; fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) { @@ -693,35 +1206,59 @@ fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) { } impl NAFrame { + /// Constructs a new `NAFrame` instance. pub fn new(ts: NATimeInfo, ftype: FrameType, keyframe: bool, info: NACodecInfoRef, - options: HashMap, + /*options: HashMap,*/ buffer: NABufferType) -> Self { - NAFrame { ts, buffer, info, ftype, key: keyframe, options } + NAFrame { ts, id: 0, buffer, info, frame_type: ftype, key: keyframe/*, options*/ } } + /// Returns frame format information. 
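Sketch of the accepted input forms for the parser above (a few illustrative assertions):

```rust
use std::str::FromStr;

fn parse_seek_targets() {
    assert_eq!(NATimePoint::from_str("1234pts").unwrap(), NATimePoint::PTS(1234));
    assert_eq!(NATimePoint::from_str("2500ms").unwrap(),
               NATimePoint::Milliseconds(2500));
    // "[hh:][mm:]ss[.ms]" — the fractional part is padded/cut to milliseconds.
    assert_eq!(NATimePoint::from_str("1:02:03.5").unwrap(),
               NATimePoint::Milliseconds(3_723_500));
    // Out-of-range minutes or seconds are rejected.
    assert!(NATimePoint::from_str("1:75:00").is_err());
}
```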
pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() } - pub fn get_frame_type(&self) -> FrameType { self.ftype } + /// Returns frame type. + pub fn get_frame_type(&self) -> FrameType { self.frame_type } + /// Reports whether the frame is a keyframe. pub fn is_keyframe(&self) -> bool { self.key } - pub fn set_frame_type(&mut self, ftype: FrameType) { self.ftype = ftype; } + /// Sets new frame type. + pub fn set_frame_type(&mut self, ftype: FrameType) { self.frame_type = ftype; } + /// Sets keyframe flag. pub fn set_keyframe(&mut self, key: bool) { self.key = key; } + /// Returns frame timestamp. pub fn get_time_information(&self) -> NATimeInfo { self.ts } + /// Returns frame presentation time. pub fn get_pts(&self) -> Option { self.ts.get_pts() } + /// Returns frame decoding time. pub fn get_dts(&self) -> Option { self.ts.get_dts() } + /// Returns picture ID. + pub fn get_id(&self) -> i64 { self.id } + /// Returns frame display duration. pub fn get_duration(&self) -> Option { self.ts.get_duration() } + /// Sets new presentation timestamp. pub fn set_pts(&mut self, pts: Option) { self.ts.set_pts(pts); } + /// Sets new decoding timestamp. pub fn set_dts(&mut self, dts: Option) { self.ts.set_dts(dts); } + /// Sets new picture ID. + pub fn set_id(&mut self, id: i64) { self.id = id; } + /// Sets new duration. pub fn set_duration(&mut self, dur: Option) { self.ts.set_duration(dur); } + /// Returns a reference to the frame data. pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() } + /// Converts current instance into a reference-counted one. pub fn into_ref(self) -> NAFrameRef { Arc::new(self) } + + /// Creates new frame with metadata from `NAPacket`. + pub fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame { + NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, /*HashMap::new(),*/ buf) + } } impl fmt::Display for NAFrame { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut ostr = format!("frame type {}", self.ftype); + let mut ostr = format!("frame type {}", self.frame_type); if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); } if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); } if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); } @@ -730,19 +1267,19 @@ impl fmt::Display for NAFrame { } } -/// Possible stream types. -#[derive(Debug,Clone,Copy)] +/// A list of possible stream types. +#[derive(Debug,Clone,Copy,PartialEq)] #[allow(dead_code)] pub enum StreamType { - /// video stream + /// Video stream. Video, - /// audio stream + /// Audio stream. Audio, - /// subtitles + /// Subtitles. Subtitles, - /// any data stream (or might be an unrecognized audio/video stream) + /// Any data stream (or might be an unrecognized audio/video stream). Data, - /// nonexistent stream + /// Nonexistent stream. None, } @@ -758,19 +1295,28 @@ impl fmt::Display for StreamType { } } +/// Stream data. #[allow(dead_code)] #[derive(Clone)] pub struct NAStream { - media_type: StreamType, - id: u32, - num: usize, - info: NACodecInfoRef, - tb_num: u32, - tb_den: u32, + media_type: StreamType, + /// Stream ID. + pub id: u32, + num: usize, + info: NACodecInfoRef, + /// Timebase numerator. + pub tb_num: u32, + /// Timebase denominator. + pub tb_den: u32, + /// Duration in timebase units (zero if not available). + pub duration: u64, } +/// A specialised reference-counted `NAStream` type. pub type NAStreamRef = Arc; +/// Downscales the timebase by its greatest common denominator. 
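Sketch of the usual decoder output path using `new_from_pkt` (the frame-type decision is illustrative):

```rust
// Wrap a filled buffer into a frame that inherits the packet's timing.
fn produce_frame(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType,
                 is_intra: bool) -> NAFrameRef {
    let mut frm = NAFrame::new_from_pkt(pkt, info, buf);
    frm.set_frame_type(if is_intra { FrameType::I } else { FrameType::P });
    frm.set_keyframe(is_intra);
    frm.into_ref()
}
```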
+#[allow(clippy::comparison_chain)] pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) { if tb_num == 0 { return (tb_num, tb_den); } if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); } @@ -787,20 +1333,32 @@ pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) { } impl NAStream { - pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self { + /// Constructs a new `NAStream` instance. + pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32, duration: u64) -> Self { let (n, d) = reduce_timebase(tb_num, tb_den); - NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d } + NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d, duration } } + /// Returns stream id. pub fn get_id(&self) -> u32 { self.id } + /// Returns stream type. + pub fn get_media_type(&self) -> StreamType { self.media_type } + /// Returns stream number assigned by demuxer. pub fn get_num(&self) -> usize { self.num } + /// Sets stream number. pub fn set_num(&mut self, num: usize) { self.num = num; } + /// Returns codec information. pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() } + /// Returns stream timebase. pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) } + /// Sets new stream timebase. pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) { let (n, d) = reduce_timebase(tb_num, tb_den); self.tb_num = n; self.tb_den = d; } + /// Returns stream duration. + pub fn get_duration(&self) -> usize { self.num } + /// Converts current instance into a reference-counted one. pub fn into_ref(self) -> NAStreamRef { Arc::new(self) } } @@ -810,28 +1368,64 @@ impl fmt::Display for NAStream { } } +/// Side data that may accompany demuxed data. +#[derive(Clone)] +pub enum NASideData { + /// Palette information. + /// + /// This side data contains a flag signalling that palette has changed since previous time and a reference to the current palette. + /// Palette is stored in 8-bit RGBA format. + Palette(bool, Arc<[u8; 1024]>), + /// Generic user data. + UserData(Arc>), +} + +/// Packet with compressed data. #[allow(dead_code)] pub struct NAPacket { - stream: NAStreamRef, - ts: NATimeInfo, - buffer: NABufferRef>, - keyframe: bool, + stream: NAStreamRef, + /// Packet timestamp. + pub ts: NATimeInfo, + buffer: NABufferRef>, + /// Keyframe flag. + pub keyframe: bool, // options: HashMap>, + /// Packet side data (e.g. palette for paletted formats). + pub side_data: Vec, } impl NAPacket { + /// Constructs a new `NAPacket` instance. pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec) -> Self { // let mut vec: Vec = Vec::new(); // vec.resize(size, 0); - NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) } + NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec), side_data: Vec::new() } } + /// Constructs a new `NAPacket` instance reusing a buffer reference. + pub fn new_from_refbuf(str: NAStreamRef, ts: NATimeInfo, kf: bool, buffer: NABufferRef>) -> Self { + NAPacket { stream: str, ts, keyframe: kf, buffer, side_data: Vec::new() } + } + /// Returns information about the stream packet belongs to. pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() } + /// Returns packet timestamp. pub fn get_time_information(&self) -> NATimeInfo { self.ts } + /// Returns packet presentation timestamp. pub fn get_pts(&self) -> Option { self.ts.get_pts() } + /// Returns packet decoding timestamp. 
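Demuxer-style sketch for the stream/packet constructors above (codec name, timebase, duration and the `YUV420_FORMAT` constant are illustrative assumptions; note that `NAStream::new` now takes the stream duration as its last argument):

```rust
fn make_stream_and_packet(data: Vec<u8>) -> NAPacket {
    // The timebase is reduced on construction, so 1000/25000 becomes 1/25.
    assert_eq!(reduce_timebase(1000, 25000), (1, 25));

    let vinfo  = NAVideoInfo::new(320, 240, false, YUV420_FORMAT);
    let cinfo  = NACodecInfo::new("cinepak", NACodecTypeInfo::Video(vinfo), None);
    let stream = NAStream::new(StreamType::Video, 0, cinfo, 1000, 25000, 250).into_ref();

    let ts = NATimeInfo::new(Some(0), None, Some(1), 1, 25);
    NAPacket::new(stream, ts, true, data)
}
```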
     pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
+    /// Returns packet duration.
     pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
+    /// Reports whether this is a keyframe packet.
     pub fn is_keyframe(&self) -> bool { self.keyframe }
+    /// Returns a reference to packet data.
     pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
+    /// Adds side data for a packet.
+    pub fn add_side_data(&mut self, side_data: NASideData) { self.side_data.push(side_data); }
+    /// Assigns packet to a new stream.
+    pub fn reassign(&mut self, str: NAStreamRef, ts: NATimeInfo) {
+        self.stream = str;
+        self.ts = ts;
+    }
 }
 
 impl Drop for NAPacket {
@@ -850,17 +1444,20 @@ impl fmt::Display for NAPacket {
     }
 }
 
-pub trait FrameFromPacket {
-    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame;
-    fn fill_timestamps(&mut self, pkt: &NAPacket);
-}
-
-impl FrameFromPacket for NAFrame {
-    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
-        NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, HashMap::new(), buf)
-    }
-    fn fill_timestamps(&mut self, pkt: &NAPacket) {
-        self.ts = pkt.get_time_information();
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_time_parse() {
+        assert_eq!(NATimePoint::PTS(42).to_string(), "42pts");
+        assert_eq!(NATimePoint::Milliseconds(4242000).to_string(), "1:10:42");
+        assert_eq!(NATimePoint::Milliseconds(42424242).to_string(), "11:47:04.242");
+        let ret = NATimePoint::from_str("42pts");
+        assert_eq!(ret.unwrap(), NATimePoint::PTS(42));
+        let ret = NATimePoint::from_str("1:2:3");
+        assert_eq!(ret.unwrap(), NATimePoint::Milliseconds(3723000));
+        let ret = NATimePoint::from_str("1:2:3.42");
+        assert_eq!(ret.unwrap(), NATimePoint::Milliseconds(3723420));
     }
 }
-
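Sketch for the new side-data mechanism (illustrative; palettes are 8-bit RGBA as documented above):

```rust
use std::sync::Arc;

// Attach a palette to a packet of a paletted video stream; the flag tells
// the decoder whether the palette changed since the previous packet.
fn attach_palette(pkt: &mut NAPacket, pal: [u8; 1024], changed: bool) {
    pkt.add_side_data(NASideData::Palette(changed, Arc::new(pal)));
}
```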