/// Decoded video frame storage: reference-counted pixel data together with
/// the per-plane offsets and strides needed to address it.
///
/// `Clone` is shallow for the pixel data — clones share the same
/// reference-counted buffer (see the `Rc<RefCell<..>>` construction in the
/// allocators below); use `copy_buffer` for a deep copy.
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    info: NAVideoInfo,      // picture parameters (width, height, format, orientation)
    data: NABufferRefT<T>,  // shared pixel data
    offs: Vec<usize>,       // start offset of each plane inside `data`
    strides: Vec<usize>,    // line stride (in elements) for each plane
}
+
+impl<T: Clone> NAVideoBuffer<T> {
+ pub fn get_offset(&self, idx: usize) -> usize {
+ if idx >= self.offs.len() { 0 }
+ else { self.offs[idx] }
+ }
+ pub fn get_info(&self) -> NAVideoInfo { self.info }
+ pub fn get_data(&self) -> Ref<Vec<T>> { self.data.borrow() }
+ pub fn get_data_mut(&mut self) -> RefMut<Vec<T>> { self.data.borrow_mut() }
+ pub fn copy_buffer(&mut self) -> Self {
+ let mut data: Vec<T> = Vec::with_capacity(self.data.borrow().len());
+ data.clone_from(self.data.borrow().as_ref());
+ let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
+ offs.clone_from(&self.offs);
+ let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
+ strides.clone_from(&self.strides);
+ NAVideoBuffer { info: self.info, data: Rc::new(RefCell::new(data)), offs: offs, strides: strides }
+ }
+ pub fn get_stride(&self, idx: usize) -> usize {
+ if idx >= self.strides.len() { return 0; }
+ self.strides[idx]
+ }
+ pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
+ get_plane_size(&self.info, idx)
+ }
+}
+
/// Decoded audio frame storage: reference-counted sample data plus the
/// per-channel offsets and the channel map.
///
/// `Clone` is shallow for the sample data — clones share the same
/// reference-counted buffer; use `copy_buffer` for a deep copy.
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    info: NAAudioInfo,     // audio parameters (channels, sample format, ...)
    data: NABufferRefT<T>, // shared sample data
    offs: Vec<usize>,      // start offset of each channel inside `data` (planar layouts)
    chmap: NAChannelMap,   // channel layout description
}
+
+impl<T: Clone> NAAudioBuffer<T> {
+ pub fn get_offset(&self, idx: usize) -> usize {
+ if idx >= self.offs.len() { 0 }
+ else { self.offs[idx] }
+ }
+ pub fn get_info(&self) -> NAAudioInfo { self.info }
+ pub fn get_chmap(&self) -> NAChannelMap { self.chmap.clone() }
+ pub fn get_data(&self) -> Ref<Vec<T>> { self.data.borrow() }
+ pub fn get_data_mut(&mut self) -> RefMut<Vec<T>> { self.data.borrow_mut() }
+ pub fn copy_buffer(&mut self) -> Self {
+ let mut data: Vec<T> = Vec::with_capacity(self.data.borrow().len());
+ data.clone_from(self.data.borrow().as_ref());
+ let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
+ offs.clone_from(&self.offs);
+ NAAudioBuffer { info: self.info, data: Rc::new(RefCell::new(data)), offs: offs, chmap: self.get_chmap() }
+ }
+}
+
+impl NAAudioBuffer<u8> {
+ pub fn new_from_buf(info: NAAudioInfo, data: NABufferRefT<u8>, chmap: NAChannelMap) -> Self {
+ NAAudioBuffer { info: info, data: data, chmap: chmap, offs: Vec::new() }
+ }
+}
+
/// Tagged union over every buffer kind a decoder can output.
#[derive(Clone)]
pub enum NABufferType {
    /// 8-bit video (planar or paletted).
    Video      (NAVideoBuffer<u8>),
    /// Video deeper than 8 bits per component, stored as 16-bit elements.
    Video16    (NAVideoBuffer<u16>),
    /// Packed-pixel video stored as raw bytes.
    VideoPacked(NAVideoBuffer<u8>),
    /// Unsigned 8-bit audio samples.
    AudioU8    (NAAudioBuffer<u8>),
    /// Signed 16-bit audio samples.
    AudioI16   (NAAudioBuffer<i16>),
    /// Signed 32-bit audio samples.
    AudioI32   (NAAudioBuffer<i32>),
    /// 32-bit floating-point audio samples.
    AudioF32   (NAAudioBuffer<f32>),
    /// Packed audio stored as raw bytes.
    AudioPacked(NAAudioBuffer<u8>),
    /// Plain byte buffer with no format information attached.
    Data       (NABufferRefT<u8>),
    /// No buffer present.
    None,
}
+
+impl NABufferType {
+ pub fn get_offset(&self, idx: usize) -> usize {
+ match *self {
+ NABufferType::Video(ref vb) => vb.get_offset(idx),
+ NABufferType::Video16(ref vb) => vb.get_offset(idx),
+ NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
+ NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
+ NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
+ NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
+ NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
+ _ => 0,
+ }
+ }
+ pub fn get_vbuf(&mut self) -> Option<NAVideoBuffer<u8>> {
+ match *self {
+ NABufferType::Video(ref vb) => Some(vb.clone()),
+ NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
+ _ => None,
+ }
+ }
+ pub fn get_vbuf16(&mut self) -> Option<NAVideoBuffer<u16>> {
+ match *self {
+ NABufferType::Video16(ref vb) => Some(vb.clone()),
+ _ => None,
+ }
+ }
+ pub fn get_abuf_u8(&mut self) -> Option<NAAudioBuffer<u8>> {
+ match *self {
+ NABufferType::AudioU8(ref ab) => Some(ab.clone()),
+ NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
+ _ => None,
+ }
+ }
+ pub fn get_abuf_i16(&mut self) -> Option<NAAudioBuffer<i16>> {
+ match *self {
+ NABufferType::AudioI16(ref ab) => Some(ab.clone()),
+ _ => None,
+ }
+ }
+ pub fn get_abuf_i32(&mut self) -> Option<NAAudioBuffer<i32>> {
+ match *self {
+ NABufferType::AudioI32(ref ab) => Some(ab.clone()),
+ _ => None,
+ }
+ }
+ pub fn get_abuf_f32(&mut self) -> Option<NAAudioBuffer<f32>> {
+ match *self {
+ NABufferType::AudioF32(ref ab) => Some(ab.clone()),
+ _ => None,
+ }
+ }
+}
+
/// Errors that buffer allocation can report.
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    /// A size computation for the requested dimensions overflowed `usize`.
    TooLargeDimensions,
    /// The supplied format description is inconsistent or unsupported.
    FormatError,
}
+
+pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
+ let fmt = &vinfo.format;
+ let mut new_size: usize = 0;
+ let mut offs: Vec<usize> = Vec::new();
+ let mut strides: Vec<usize> = Vec::new();
+
+ for i in 0..fmt.get_num_comp() {
+ if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
+ }
+
+ let align_mod = ((1 << align) as usize) - 1;
+ let width = ((vinfo.width as usize) + align_mod) & !align_mod;
+ let height = ((vinfo.height as usize) + align_mod) & !align_mod;
+ let mut max_depth = 0;
+ let mut all_packed = true;
+ for i in 0..fmt.get_num_comp() {
+ let ochr = fmt.get_chromaton(i);
+ if let None = ochr { continue; }
+ let chr = ochr.unwrap();
+ if !chr.is_packed() {
+ all_packed = false;
+ break;
+ }
+ max_depth = max(max_depth, chr.get_depth());
+ }
+
+//todo semi-packed like NV12
+ if fmt.is_paletted() {
+//todo various-sized palettes?
+ let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
+ let pic_sz = stride.checked_mul(height);
+ if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
+ let pal_size = 256 * (fmt.get_elem_size() as usize);
+ let new_size = pic_sz.unwrap().checked_add(pal_size);
+ if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
+ offs.push(0);
+ offs.push(stride * height);
+ strides.push(stride);
+ let mut data: Vec<u8> = Vec::with_capacity(new_size.unwrap());
+ data.resize(new_size.unwrap(), 0);
+ let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
+ Ok(NABufferType::Video(buf))
+ } else if !all_packed {
+ for i in 0..fmt.get_num_comp() {
+ let ochr = fmt.get_chromaton(i);
+ if let None = ochr { continue; }
+ let chr = ochr.unwrap();
+ if !vinfo.is_flipped() {
+ offs.push(new_size as usize);
+ }
+ let stride = chr.get_linesize(width);
+ let cur_h = chr.get_height(height);
+ let cur_sz = stride.checked_mul(cur_h);
+ if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
+ let new_sz = new_size.checked_add(cur_sz.unwrap());
+ if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
+ new_size = new_sz.unwrap();
+ if vinfo.is_flipped() {
+ offs.push(new_size as usize);
+ }
+ strides.push(stride);
+ }
+ if max_depth <= 8 {
+ let mut data: Vec<u8> = Vec::with_capacity(new_size);
+ data.resize(new_size, 0);
+ let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
+ Ok(NABufferType::Video(buf))
+ } else {
+ let mut data: Vec<u16> = Vec::with_capacity(new_size);
+ data.resize(new_size, 0);
+ let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
+ Ok(NABufferType::Video16(buf))
+ }
+ } else {
+ let elem_sz = fmt.get_elem_size();
+ let line_sz = width.checked_mul(elem_sz as usize);
+ if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
+ let new_sz = line_sz.unwrap().checked_mul(height);
+ if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
+ new_size = new_sz.unwrap();
+ let mut data: Vec<u8> = Vec::with_capacity(new_size);
+ data.resize(new_size, 0);
+ strides.push(line_sz.unwrap());
+ let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: Rc::new(RefCell::new(data)), info: vinfo, offs: offs, strides: strides };
+ Ok(NABufferType::VideoPacked(buf))
+ }
+}
+
+pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
+ let mut offs: Vec<usize> = Vec::new();
+ if ainfo.format.is_planar() {
+ let len = nsamples.checked_mul(ainfo.channels as usize);
+ if len == None { return Err(AllocatorError::TooLargeDimensions); }
+ let length = len.unwrap();
+ for i in 0..ainfo.channels {
+ offs.push((i as usize) * nsamples);
+ }
+ if ainfo.format.is_float() {
+ if ainfo.format.get_bits() == 32 {
+ let mut data: Vec<f32> = Vec::with_capacity(length);
+ data.resize(length, 0.0);
+ let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ Ok(NABufferType::AudioF32(buf))
+ } else {
+ Err(AllocatorError::TooLargeDimensions)
+ }
+ } else {
+ if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
+ let mut data: Vec<u8> = Vec::with_capacity(length);
+ data.resize(length, 0);
+ let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ Ok(NABufferType::AudioU8(buf))
+ } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
+ let mut data: Vec<i16> = Vec::with_capacity(length);
+ data.resize(length, 0);
+ let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ Ok(NABufferType::AudioI16(buf))
+ } else {
+ Err(AllocatorError::TooLargeDimensions)
+ }
+ }
+ } else {
+ let len = nsamples.checked_mul(ainfo.channels as usize);
+ if len == None { return Err(AllocatorError::TooLargeDimensions); }
+ let length = ainfo.format.get_audio_size(len.unwrap() as u64);
+ let mut data: Vec<u8> = Vec::with_capacity(length);
+ data.resize(length, 0);
+ let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: Rc::new(RefCell::new(data)), info: ainfo, offs: offs, chmap: chmap };
+ Ok(NABufferType::AudioPacked(buf))
+ }
+}
+
+pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
+ let mut data: Vec<u8> = Vec::with_capacity(size);
+ data.resize(size, 0);
+ let buf: NABufferRefT<u8> = Rc::new(RefCell::new(data));
+ Ok(NABufferType::Data(buf))
+}
+
+pub fn copy_buffer(buf: NABufferType) -> NABufferType {
+ buf.clone()