_ => None,
}
}
+ pub fn is_video(&self) -> bool {
+ match *self {
+ NACodecTypeInfo::Video(_) => true,
+ _ => false,
+ }
+ }
+ pub fn is_audio(&self) -> bool {
+ match *self {
+ NACodecTypeInfo::Audio(_) => true,
+ _ => false,
+ }
+ }
}
impl fmt::Display for NACodecTypeInfo {
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
- info: NAVideoInfo,
- data: NABufferRefT<T>,
- offs: Vec<usize>,
+ info: NAVideoInfo,
+ data: NABufferRefT<T>,
+ offs: Vec<usize>,
+ strides: Vec<usize>,
}
impl<T: Clone> NAVideoBuffer<T> {
data.clone_from(self.data.borrow().as_ref());
let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
offs.clone_from(&self.offs);
- NAVideoBuffer { info: self.info, data: Rc::new(RefCell::new(data)), offs: offs }
+ let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
+ strides.clone_from(&self.strides);
+ NAVideoBuffer { info: self.info, data: Rc::new(RefCell::new(data)), offs: offs, strides: strides }
}
pub fn get_stride(&self, idx: usize) -> usize {
- if idx >= self.info.get_format().get_num_comp() { return 0; }
- self.info.get_format().get_chromaton(idx).unwrap().get_linesize(self.info.get_width())
+ if idx >= self.strides.len() { return 0; }
+ self.strides[idx]
}
pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
get_plane_size(&self.info, idx)
data: NABufferRefT<T>,
offs: Vec<usize>,
chmap: NAChannelMap,
+ len: usize,
}
impl<T: Clone> NAAudioBuffer<T> {
data.clone_from(self.data.borrow().as_ref());
let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
offs.clone_from(&self.offs);
- NAAudioBuffer { info: self.info, data: Rc::new(RefCell::new(data)), offs: offs, chmap: self.get_chmap() }
+ NAAudioBuffer { info: self.info, data: Rc::new(RefCell::new(data)), offs: offs, chmap: self.get_chmap(), len: self.len }
}
+ pub fn get_length(&self) -> usize { self.len }
}
impl NAAudioBuffer<u8> {
    /// Constructs an audio buffer object around an already existing byte buffer.
    pub fn new_from_buf(info: NAAudioInfo, data: NABufferRefT<u8>, chmap: NAChannelMap) -> Self {
        // The whole input buffer is the payload, so the recorded length is
        // simply the current length of `data`.
        let len = data.borrow().len();
        NAAudioBuffer { info: info, chmap: chmap, offs: Vec::new(), len: len, data: data }
    }
}
// Allocates a video frame buffer matching the given video parameters.
// `align` is accepted for alignment control, but its use is not visible in
// this fragment — TODO confirm how alignment is applied in the full source.
// NOTE(review): this span is a diff fragment; several context lines (loop
// closings and the `width`/`height`/`max_depth`/`line_sz` bindings) are elided.
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
let fmt = &vinfo.format;
let mut new_size: usize = 0;
- let mut offs: Vec<usize> = Vec::new();
+ let mut offs: Vec<usize> = Vec::new();
// Strides are now collected at allocation time, one entry per plane,
// instead of being recomputed from the format description on each query.
+ let mut strides: Vec<usize> = Vec::new();
// Validate the format first: every declared component needs a chromaton.
for i in 0..fmt.get_num_comp() {
if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
let mut max_depth = 0;
let mut all_packed = true;
// Scan the components to find out whether the format is fully packed.
for i in 0..fmt.get_num_comp() {
- let chr = fmt.get_chromaton(i).unwrap();
// Defensive: a missing chromaton is skipped here even though the loop
// above has already rejected formats with missing chromatons.
+ let ochr = fmt.get_chromaton(i);
+ if let None = ochr { continue; }
+ let chr = ochr.unwrap();
if !chr.is_packed() {
all_packed = false;
break;
//todo semi-packed like NV12
// Paletted path: one 8-bit image plane followed by a 256-entry palette.
if fmt.is_paletted() {
//todo various-sized palettes?
- let pic_sz = width.checked_mul(height);
// Image size is now derived from the first component's line size
// (the stride) rather than the raw pixel width.
+ let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
+ let pic_sz = stride.checked_mul(height);
if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
let pal_size = 256 * (fmt.get_elem_size() as usize);
let new_size = pic_sz.unwrap().checked_add(pal_size);
if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
// offs[0] — start of image data, offs[1] — start of the palette.
offs.push(0);
- offs.push(width * height);
+ offs.push(stride * height);
+ strides.push(stride);
let mut data: Vec<u8> = Vec::with_capacity(new_size.unwrap());
data.resize(new_size.unwrap(), 0);
- let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs };
+ let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
Ok(NABufferType::Video(buf))
// Planar path: accumulate per-plane sizes, recording an offset and a
// stride for every component plane.
} else if !all_packed {
for i in 0..fmt.get_num_comp() {
- let chr = fmt.get_chromaton(i).unwrap();
+ let ochr = fmt.get_chromaton(i);
+ if let None = ochr { continue; }
+ let chr = ochr.unwrap();
if !vinfo.is_flipped() {
offs.push(new_size as usize);
}
// Plane size is stride * height; the stride comes from the chromaton
// line size instead of the component width as before.
- let cur_w = chr.get_width(width);
+ let stride = chr.get_linesize(width);
let cur_h = chr.get_height(height);
- let cur_sz = cur_w.checked_mul(cur_h);
+ let cur_sz = stride.checked_mul(cur_h);
if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
let new_sz = new_size.checked_add(cur_sz.unwrap());
if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
// For flipped pictures the offset is pushed after the plane size has
// been accounted for — presumably `new_size` is updated by an elided
// line between these branches; TODO confirm against the full source.
if vinfo.is_flipped() {
offs.push(new_size as usize);
}
+ strides.push(stride);
}
// Element type is chosen from the maximum component bit depth
// (`max_depth` is filled in by elided context lines above).
if max_depth <= 8 {
let mut data: Vec<u8> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
- let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs };
+ let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
Ok(NABufferType::Video(buf))
} else {
let mut data: Vec<u16> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
- let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs };
+ let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
Ok(NABufferType::Video16(buf))
}
// Packed path: a single buffer whose only stride is the packed line size
// (`line_sz` is computed in an elided context line).
} else {
new_size = new_sz.unwrap();
let mut data: Vec<u8> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
- let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs };
+ strides.push(line_sz.unwrap());
+ let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
Ok(NABufferType::VideoPacked(buf))
}
}
if ainfo.format.get_bits() == 32 {
let mut data: Vec<f32> = Vec::with_capacity(length);
data.resize(length, 0.0);
- let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap, len: nsamples };
Ok(NABufferType::AudioF32(buf))
} else {
Err(AllocatorError::TooLargeDimensions)
if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
let mut data: Vec<u8> = Vec::with_capacity(length);
data.resize(length, 0);
- let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap, len: nsamples };
Ok(NABufferType::AudioU8(buf))
} else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
let mut data: Vec<i16> = Vec::with_capacity(length);
data.resize(length, 0);
- let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap, len: nsamples };
Ok(NABufferType::AudioI16(buf))
} else {
Err(AllocatorError::TooLargeDimensions)
let length = ainfo.format.get_audio_size(len.unwrap() as u64);
let mut data: Vec<u8> = Vec::with_capacity(length);
data.resize(length, 0);
- let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap, len: nsamples };
Ok(NABufferType::AudioPacked(buf))
}
}
if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
false
}
+ pub fn new_dummy() -> Rc<Self> {
+ Rc::new(DUMMY_CODEC_INFO)
+ }
+ pub fn replace_info(&self, p: NACodecTypeInfo) -> Rc<Self> {
+ Rc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
+ }
}
impl fmt::Display for NACodecInfo {
buffer: NABufferType) -> Self {
NAFrame { ts: ts, buffer: buffer, info: info, ftype: ftype, key: keyframe, options: options }
}
+ pub fn get_info(&self) -> Rc<NACodecInfo> { self.info.clone() }
pub fn get_frame_type(&self) -> FrameType { self.ftype }
pub fn is_keyframe(&self) -> bool { self.key }
pub fn set_frame_type(&mut self, ftype: FrameType) { self.ftype = ftype; }