use std::cmp::max;
use std::collections::HashMap;
use std::fmt;
-pub use std::rc::Rc;
-pub use std::cell::*;
use std::sync::Arc;
pub use crate::formats::*;
pub use crate::refs::*;
+// Returns the (width, height) of plane `idx`, delegating to get_plane_size().
pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
get_plane_size(&self.info, idx)
}
+ // Consumes the buffer and wraps it in NABufferRef (presumably ref-counted —
+ // see the refs module) so it can be shared cheaply via NAVideoBufferRef.
+ pub fn into_ref(self) -> NABufferRef<Self> {
+ NABufferRef::new(self)
+ }
}
+// Convenience alias: a shared handle to a video buffer; NABufferType's video
+// variants now store this instead of owning the buffer directly.
+pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;
+
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
info: NAAudioInfo,
#[derive(Clone)]
pub enum NABufferType {
- Video (NAVideoBuffer<u8>),
- Video16 (NAVideoBuffer<u16>),
- Video32 (NAVideoBuffer<u32>),
- VideoPacked(NAVideoBuffer<u8>),
+ Video (NAVideoBufferRef<u8>),
+ Video16 (NAVideoBufferRef<u16>),
+ Video32 (NAVideoBufferRef<u32>),
+ VideoPacked(NAVideoBufferRef<u8>),
AudioU8 (NAAudioBuffer<u8>),
AudioI16 (NAAudioBuffer<i16>),
AudioI32 (NAAudioBuffer<i32>),
_ => None,
}
}
+ // Extracts an 8-bit video buffer (plain or packed variants); None otherwise.
+ // Return type updated to the shared NAVideoBufferRef handle.
- pub fn get_vbuf(&self) -> Option<NAVideoBuffer<u8>> {
+ pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
match *self {
NABufferType::Video(ref vb) => Some(vb.clone()),
NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
_ => None,
}
}
+ // Extracts a 16-bit video buffer (Video16 variant only); None otherwise.
- pub fn get_vbuf16(&self) -> Option<NAVideoBuffer<u16>> {
+ pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
match *self {
NABufferType::Video16(ref vb) => Some(vb.clone()),
_ => None,
}
}
- pub fn get_vbuf32(&self) -> Option<NAVideoBuffer<u32>> {
+ pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
match *self {
NABufferType::Video32(ref vb) => Some(vb.clone()),
_ => None,
let mut data: Vec<u8> = Vec::with_capacity(new_size.unwrap());
data.resize(new_size.unwrap(), 0);
let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::Video(buf))
+ Ok(NABufferType::Video(buf.into_ref()))
} else if !all_packed {
for i in 0..fmt.get_num_comp() {
let ochr = fmt.get_chromaton(i);
let mut data: Vec<u8> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::Video(buf))
+ Ok(NABufferType::Video(buf.into_ref()))
} else if max_depth <= 16 {
let mut data: Vec<u16> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::Video16(buf))
+ Ok(NABufferType::Video16(buf.into_ref()))
} else {
let mut data: Vec<u32> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::Video32(buf))
+ Ok(NABufferType::Video32(buf.into_ref()))
}
} else if all_bytealigned || unfit_elem_size {
let elem_sz = fmt.get_elem_size();
data.resize(new_size, 0);
strides.push(line_sz.unwrap());
let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::VideoPacked(buf))
+ Ok(NABufferType::VideoPacked(buf.into_ref()))
} else {
let elem_sz = fmt.get_elem_size();
let new_sz = width.checked_mul(height);
data.resize(new_size, 0);
strides.push(width);
let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::Video16(buf))
+ Ok(NABufferType::Video16(buf.into_ref()))
},
4 => {
let mut data: Vec<u32> = Vec::with_capacity(new_size);
data.resize(new_size, 0);
strides.push(width);
let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs: offs, strides: strides };
- Ok(NABufferType::Video32(buf))
+ Ok(NABufferType::Video32(buf.into_ref()))
},
_ => unreachable!(),
}
options: HashMap<String, NAValue>,
}
+// Frames switch from single-threaded, interior-mutable Rc<RefCell<NAFrame>>
+// to thread-safe, immutable Arc sharing.
-pub type NAFrameRef = Rc<RefCell<NAFrame>>;
+pub type NAFrameRef = Arc<NAFrame>;
fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
let chromaton = info.get_format().get_chromaton(idx);
+// Sets (or clears, with None) the frame duration on the timestamp info.
pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }
+// Returns a clone of the frame's buffer enum.
pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }
+
+ // Consumes the frame and produces an Arc-backed NAFrameRef for sharing.
+ pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
}
impl fmt::Display for NAFrame {
tb_den: u32,
}
+// Shared, thread-safe handle to a stream; replaces direct Rc<NAStream> usage
+// in NAPacket below.
+pub type NAStreamRef = Arc<NAStream>;
+
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
if tb_num == 0 { return (tb_num, tb_den); }
if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }
self.tb_num = n;
self.tb_den = d;
}
+ // Consumes the stream and produces an Arc-backed NAStreamRef for sharing.
+ pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
}
impl fmt::Display for NAStream {
#[allow(dead_code)]
+// A demuxed packet: owning stream, timing information, payload bytes and a
+// keyframe flag.
pub struct NAPacket {
+ // Owning stream, now held via the Arc-based NAStreamRef alias instead of
+ // a bare Rc<NAStream>.
- stream: Rc<NAStream>,
+ stream: NAStreamRef,
ts: NATimeInfo,
buffer: NABufferRef<Vec<u8>>,
keyframe: bool,
}
impl NAPacket {
+ // Constructs a packet taking shared ownership of its stream (NAStreamRef,
+ // formerly Rc<NAStream>); the payload `vec` is moved into a NABufferRef.
- pub fn new(str: Rc<NAStream>, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
+ pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
// let mut vec: Vec<u8> = Vec::new();
// vec.resize(size, 0);
NAPacket { stream: str, ts: ts, keyframe: kf, buffer: NABufferRef::new(vec) }
}
+ // Returns a new shared handle to the packet's stream (Arc refcount bump).
- pub fn get_stream(&self) -> Rc<NAStream> { self.stream.clone() }
+ pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
+ // Timestamp accessors; NATimeInfo is copied out by value.
pub fn get_time_information(&self) -> NATimeInfo { self.ts }
pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }