introduce NADecoderSupport and buffer pools
[nihav.git] nihav-core/src/frame.rs
use std::cmp::max;
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
pub use crate::formats::*;
pub use crate::refs::*;

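/// Audio stream properties: sample rate, channel count, sample format and codec block length (in samples).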
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    sample_rate: u32,
    channels: u8,
    format: NASoniton,
    block_len: usize,
}

impl NAAudioInfo {
    pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
        NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
    }
    pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
    pub fn get_channels(&self) -> u8 { self.channels }
    pub fn get_format(&self) -> NASoniton { self.format }
    pub fn get_block_len(&self) -> usize { self.block_len }
}

impl fmt::Display for NAAudioInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
    }
}

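/// Video stream properties: dimensions, orientation and pixel format.
/// `flipped` marks pictures that are stored bottom line first.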
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    width: usize,
    height: usize,
    flipped: bool,
    format: NAPixelFormaton,
}

impl NAVideoInfo {
    pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
        NAVideoInfo { width: w, height: h, flipped: flip, format: fmt }
    }
    pub fn get_width(&self) -> usize { self.width }
    pub fn get_height(&self) -> usize { self.height }
    pub fn is_flipped(&self) -> bool { self.flipped }
    pub fn get_format(&self) -> NAPixelFormaton { self.format }
    pub fn set_width(&mut self, w: usize) { self.width = w; }
    pub fn set_height(&mut self, h: usize) { self.height = h; }
}

impl fmt::Display for NAVideoInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}x{}", self.width, self.height)
    }
}

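/// Codec-type-specific properties: nothing, audio parameters or video parameters.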
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    None,
    Audio(NAAudioInfo),
    Video(NAVideoInfo),
}

impl NACodecTypeInfo {
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NACodecTypeInfo::Video(vinfo) => Some(vinfo),
            _ => None,
        }
    }
    pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
        match *self {
            NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
            _ => None,
        }
    }
    pub fn is_video(&self) -> bool {
        match *self {
            NACodecTypeInfo::Video(_) => true,
            _ => false,
        }
    }
    pub fn is_audio(&self) -> bool {
        match *self {
            NACodecTypeInfo::Audio(_) => true,
            _ => false,
        }
    }
}

impl fmt::Display for NACodecTypeInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let ret = match *self {
            NACodecTypeInfo::None => String::new(),
            NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
            NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
        };
        write!(f, "{}", ret)
    }
}

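/// Decoded video frame storage: a reference-counted data vector plus the
/// per-plane offsets and strides described by `NAVideoInfo`.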
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    info: NAVideoInfo,
    data: NABufferRef<Vec<T>>,
    offs: Vec<usize>,
    strides: Vec<usize>,
}

impl<T: Clone> NAVideoBuffer<T> {
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    pub fn get_info(&self) -> NAVideoInfo { self.info }
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
        strides.clone_from(&self.strides);
        NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
    }
    pub fn get_stride(&self, idx: usize) -> usize {
        if idx >= self.strides.len() { return 0; }
        self.strides[idx]
    }
    pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
        get_plane_size(&self.info, idx)
    }
    pub fn into_ref(self) -> NABufferRef<Self> {
        NABufferRef::new(self)
    }
}

pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;

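/// Decoded audio frame storage: reference-counted sample data with per-channel
/// offsets (for planar formats), the channel map and the length in samples.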
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    info: NAAudioInfo,
    data: NABufferRef<Vec<T>>,
    offs: Vec<usize>,
    chmap: NAChannelMap,
    len: usize,
}

impl<T: Clone> NAAudioBuffer<T> {
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    pub fn get_info(&self) -> NAAudioInfo { self.info }
    pub fn get_chmap(&self) -> NAChannelMap { self.chmap.clone() }
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap(), len: self.len }
    }
    pub fn get_length(&self) -> usize { self.len }
}

impl NAAudioBuffer<u8> {
    pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
        let len = data.len();
        NAAudioBuffer { info, data, chmap, offs: Vec::new(), len }
    }
}

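/// A unified decoder output buffer: video planes of various depths, packed video,
/// audio in several sample formats, or raw data.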
#[derive(Clone)]
pub enum NABufferType {
    Video      (NAVideoBufferRef<u8>),
    Video16    (NAVideoBufferRef<u16>),
    Video32    (NAVideoBufferRef<u32>),
    VideoPacked(NAVideoBufferRef<u8>),
    AudioU8    (NAAudioBuffer<u8>),
    AudioI16   (NAAudioBuffer<i16>),
    AudioI32   (NAAudioBuffer<i32>),
    AudioF32   (NAAudioBuffer<f32>),
    AudioPacked(NAAudioBuffer<u8>),
    Data       (NABufferRef<Vec<u8>>),
    None,
}

impl NABufferType {
    pub fn get_offset(&self, idx: usize) -> usize {
        match *self {
            NABufferType::Video(ref vb) => vb.get_offset(idx),
            NABufferType::Video16(ref vb) => vb.get_offset(idx),
            NABufferType::Video32(ref vb) => vb.get_offset(idx),
            NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
            NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
            _ => 0,
        }
    }
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.get_info()),
            NABufferType::Video16(ref vb) => Some(vb.get_info()),
            NABufferType::Video32(ref vb) => Some(vb.get_info()),
            NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
            _ => None,
        }
    }
    pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.clone()),
            NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
        match *self {
            NABufferType::Video16(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
        match *self {
            NABufferType::Video32(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.clone()),
            NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
        match *self {
            NABufferType::AudioI16(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
        match *self {
            NABufferType::AudioI32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
        match *self {
            NABufferType::AudioF32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
}

const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
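/// A flattened view of a video buffer: per-component dimensions, strides and
/// offsets plus a mutable reference to the underlying data, which is convenient
/// for decoders that write all planes directly.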
pub struct NASimpleVideoFrame<'a, T: Copy> {
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub flip: bool,
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub components: usize,
    pub data: &'a mut Vec<T>,
}

impl<'a, T: Copy> NASimpleVideoFrame<'a, T> {
    pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
        let vinfo = vbuf.get_info();
        let components = vinfo.format.components as usize;
        if components > NA_SIMPLE_VFRAME_COMPONENTS {
            return None;
        }
        let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        for comp in 0..components {
            let (width, height) = vbuf.get_dimensions(comp);
            w[comp] = width;
            h[comp] = height;
            s[comp] = vbuf.get_stride(comp);
            o[comp] = vbuf.get_offset(comp);
        }
        let flip = vinfo.flipped;
        Some(NASimpleVideoFrame {
            width: w,
            height: h,
            flip,
            stride: s,
            offset: o,
            components,
            data: vbuf.data.as_mut().unwrap(),
        })
    }
}

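/// Errors reported by the buffer allocators.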
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    TooLargeDimensions,
    FormatError,
}

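/// Allocates a video buffer for the given parameters and picks the matching
/// [`NABufferType`] variant: paletted and byte-aligned packed data ends up in
/// 8-bit buffers, planar data is stored as 8-, 16- or 32-bit planes depending
/// on the maximum component depth. Width and height are rounded up to a
/// multiple of `1 << align` before the plane sizes are computed.
///
/// A minimal usage sketch (not part of this commit; it assumes a planar 4:2:0
/// format constant such as `YUV420_FORMAT` re-exported from `crate::formats`):
///
/// ```ignore
/// let vinfo = NAVideoInfo::new(320, 240, false, YUV420_FORMAT);
/// // align = 4 rounds the dimensions up to multiples of 16
/// let buf = alloc_video_buffer(vinfo, 4).expect("allocation failed");
/// let vbuf = buf.get_vbuf().expect("planar 8-bit data lands in NABufferType::Video");
/// let luma_stride = vbuf.get_stride(0);
/// ```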
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
    let fmt = &vinfo.format;
    let mut new_size: usize = 0;
    let mut offs: Vec<usize> = Vec::new();
    let mut strides: Vec<usize> = Vec::new();

    for i in 0..fmt.get_num_comp() {
        if fmt.get_chromaton(i).is_none() { return Err(AllocatorError::FormatError); }
    }

    let align_mod = ((1 << align) as usize) - 1;
    let width = (vinfo.width + align_mod) & !align_mod;
    let height = (vinfo.height + align_mod) & !align_mod;
    let mut max_depth = 0;
    let mut all_packed = true;
    let mut all_bytealigned = true;
    for i in 0..fmt.get_num_comp() {
        let ochr = fmt.get_chromaton(i);
        if ochr.is_none() { continue; }
        let chr = ochr.unwrap();
        if !chr.is_packed() {
            all_packed = false;
        } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
            all_bytealigned = false;
        }
        max_depth = max(max_depth, chr.get_depth());
    }
    let unfit_elem_size = match fmt.get_elem_size() {
        2 | 4 => false,
        _ => true,
    };

    //todo semi-packed like NV12
    if fmt.is_paletted() {
        //todo various-sized palettes?
        let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
        let pic_sz = stride.checked_mul(height);
        if pic_sz.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        let pal_size = 256 * (fmt.get_elem_size() as usize);
        let new_size = pic_sz.unwrap().checked_add(pal_size);
        if new_size.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        offs.push(0);
        offs.push(stride * height);
        strides.push(stride);
        let data: Vec<u8> = vec![0; new_size.unwrap()];
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::Video(buf.into_ref()))
    } else if !all_packed {
        for i in 0..fmt.get_num_comp() {
            let ochr = fmt.get_chromaton(i);
            if ochr.is_none() { continue; }
            let chr = ochr.unwrap();
            if !vinfo.is_flipped() {
                offs.push(new_size);
            }
            let stride = chr.get_linesize(width);
            let cur_h = chr.get_height(height);
            let cur_sz = stride.checked_mul(cur_h);
            if cur_sz.is_none() { return Err(AllocatorError::TooLargeDimensions); }
            let new_sz = new_size.checked_add(cur_sz.unwrap());
            if new_sz.is_none() { return Err(AllocatorError::TooLargeDimensions); }
            new_size = new_sz.unwrap();
            if vinfo.is_flipped() {
                offs.push(new_size);
            }
            strides.push(stride);
        }
        if max_depth <= 8 {
            let data: Vec<u8> = vec![0; new_size];
            let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video(buf.into_ref()))
        } else if max_depth <= 16 {
            let data: Vec<u16> = vec![0; new_size];
            let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video16(buf.into_ref()))
        } else {
            let data: Vec<u32> = vec![0; new_size];
            let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video32(buf.into_ref()))
        }
    } else if all_bytealigned || unfit_elem_size {
        let elem_sz = fmt.get_elem_size();
        let line_sz = width.checked_mul(elem_sz as usize);
        if line_sz.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        let new_sz = line_sz.unwrap().checked_mul(height);
        if new_sz.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        let data: Vec<u8> = vec![0; new_size];
        strides.push(line_sz.unwrap());
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::VideoPacked(buf.into_ref()))
    } else {
        let elem_sz = fmt.get_elem_size();
        let new_sz = width.checked_mul(height);
        if new_sz.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        match elem_sz {
            2 => {
                let data: Vec<u16> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video16(buf.into_ref()))
            },
            4 => {
                let data: Vec<u32> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video32(buf.into_ref()))
            },
            _ => unreachable!(),
        }
    }
}

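/// Allocates an audio buffer for `nsamples` samples per channel. Planar formats
/// (and byte-aligned mono) get per-channel storage in the matching sample type;
/// everything else is allocated as a packed byte buffer.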
pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
    let mut offs: Vec<usize> = Vec::new();
    if ainfo.format.is_planar() || (ainfo.channels == 1 && (ainfo.format.get_bits() % 8) == 0) {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        let length = len.unwrap();
        for i in 0..ainfo.channels {
            offs.push((i as usize) * nsamples);
        }
        if ainfo.format.is_float() {
            if ainfo.format.get_bits() == 32 {
                let data: Vec<f32> = vec![0.0; length];
                let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples };
                Ok(NABufferType::AudioF32(buf))
            } else {
                // only 32-bit float samples are supported
                Err(AllocatorError::FormatError)
            }
        } else {
            if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
                let data: Vec<u8> = vec![0; length];
                let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples };
                Ok(NABufferType::AudioU8(buf))
            } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
                let data: Vec<i16> = vec![0; length];
                let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples };
                Ok(NABufferType::AudioI16(buf))
            } else {
                // unsupported sample width/signedness combination
                Err(AllocatorError::FormatError)
            }
        }
    } else {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len.is_none() { return Err(AllocatorError::TooLargeDimensions); }
        let length = ainfo.format.get_audio_size(len.unwrap() as u64);
        let data: Vec<u8> = vec![0; length];
        let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples };
        Ok(NABufferType::AudioPacked(buf))
    }
}

pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
    let data: Vec<u8> = vec![0; size];
    let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
    Ok(NABufferType::Data(buf))
}

pub fn copy_buffer(buf: NABufferType) -> NABufferType {
    buf.clone()
}

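/// A simple pool of reference-counted video buffers for decoders that keep
/// reference frames: `max_len` buffers are reserved for references and
/// `set_dec_bufs` adds extra working buffers. A buffer whose reference count has
/// dropped back to one is considered free and can be handed out again.
///
/// A minimal usage sketch (not part of this commit; `vinfo` is assumed to be a
/// previously constructed `NAVideoInfo` for an 8-bit format):
///
/// ```ignore
/// let mut pool: NAVideoBufferPool<u8> = NAVideoBufferPool::new(2); // two reference frames
/// pool.set_dec_bufs(1);                                            // plus one working buffer
/// pool.prealloc_video(vinfo, 4).expect("preallocation failed");
/// let cur_frame = pool.get_free().expect("pool was just preallocated");
/// ```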
pub struct NAVideoBufferPool<T: Copy> {
    pool: Vec<NAVideoBufferRef<T>>,
    max_len: usize,
    add_len: usize,
}

impl<T: Copy> NAVideoBufferPool<T> {
    pub fn new(max_len: usize) -> Self {
        Self {
            pool: Vec::with_capacity(max_len),
            max_len,
            add_len: 0,
        }
    }
    pub fn set_dec_bufs(&mut self, add_len: usize) {
        self.add_len = add_len;
    }
    pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
        for e in self.pool.iter() {
            if e.get_num_refs() == 1 {
                return Some(e.clone());
            }
        }
        None
    }
    pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
        let mut dbuf = self.get_free()?;
        dbuf.data.copy_from_slice(&rbuf.data);
        Some(dbuf)
    }
    pub fn reset(&mut self) {
        self.pool.clear();
    }
}

impl NAVideoBufferPool<u8> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video(buf) = vbuf {
                self.pool.push(buf);
            } else if let NABufferType::VideoPacked(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

impl NAVideoBufferPool<u16> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video16(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

impl NAVideoBufferPool<u32> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video32(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

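/// Codec description: codec name, type-specific properties and optional extradata.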
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    name: &'static str,
    properties: NACodecTypeInfo,
    extradata: Option<Arc<Vec<u8>>>,
}

pub type NACodecInfoRef = Arc<NACodecInfo>;

impl NACodecInfo {
    pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
        let extradata = edata.map(Arc::new);
        NACodecInfo { name, properties: p, extradata }
    }
    pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
        NACodecInfo { name, properties: p, extradata: edata }
    }
    pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
    pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
    pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
        if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
        None
    }
    pub fn get_name(&self) -> &'static str { self.name }
    pub fn is_video(&self) -> bool {
        if let NACodecTypeInfo::Video(_) = self.properties { return true; }
        false
    }
    pub fn is_audio(&self) -> bool {
        if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
        false
    }
    pub fn new_dummy() -> Arc<Self> {
        Arc::new(DUMMY_CODEC_INFO)
    }
    pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
        Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
    }
}

impl Default for NACodecInfo {
    fn default() -> Self { DUMMY_CODEC_INFO }
}

impl fmt::Display for NACodecInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let edata = match self.extradata {
            None => "no extradata".to_string(),
            Some(ref v) => format!("{} byte(s) of extradata", v.len()),
        };
        write!(f, "{}: {} {}", self.name, self.properties, edata)
    }
}

pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };

#[derive(Debug,Clone)]
pub enum NAValue {
    None,
    Int(i32),
    Long(i64),
    String(String),
    Data(Arc<Vec<u8>>),
}

#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    I,
    P,
    B,
    Skip,
    Other,
}

impl fmt::Display for FrameType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            FrameType::I => write!(f, "I"),
            FrameType::P => write!(f, "P"),
            FrameType::B => write!(f, "B"),
            FrameType::Skip => write!(f, "skip"),
            FrameType::Other => write!(f, "x"),
        }
    }
}

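/// Timestamp information: presentation and decoding timestamps plus duration,
/// all expressed in `tb_num/tb_den` time base units.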
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    pts: Option<u64>,
    dts: Option<u64>,
    duration: Option<u64>,
    tb_num: u32,
    tb_den: u32,
}

impl NATimeInfo {
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }
}

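/// A decoded frame: timestamps, the decoded buffer, codec information, frame type,
/// keyframe flag and free-form decoder options.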
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    ts: NATimeInfo,
    buffer: NABufferType,
    info: NACodecInfoRef,
    ftype: FrameType,
    key: bool,
    options: HashMap<String, NAValue>,
}

pub type NAFrameRef = Arc<NAFrame>;

fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
    let chromaton = info.get_format().get_chromaton(idx);
    if chromaton.is_none() { return (0, 0); }
    let (hs, vs) = chromaton.unwrap().get_subsampling();
    let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
    let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
    (w, h)
}

impl NAFrame {
    pub fn new(ts: NATimeInfo,
               ftype: FrameType,
               keyframe: bool,
               info: NACodecInfoRef,
               options: HashMap<String, NAValue>,
               buffer: NABufferType) -> Self {
        NAFrame { ts, buffer, info, ftype, key: keyframe, options }
    }
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    pub fn get_frame_type(&self) -> FrameType { self.ftype }
    pub fn is_keyframe(&self) -> bool { self.key }
    pub fn set_frame_type(&mut self, ftype: FrameType) { self.ftype = ftype; }
    pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
    pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
    pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }

    pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }

    pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
}

impl fmt::Display for NAFrame {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut foo = format!("frame type {}", self.ftype);
        if let Some(pts) = self.ts.pts { foo = format!("{} pts {}", foo, pts); }
        if let Some(dts) = self.ts.dts { foo = format!("{} dts {}", foo, dts); }
        if let Some(dur) = self.ts.duration { foo = format!("{} duration {}", foo, dur); }
        if self.key { foo = format!("{} kf", foo); }
        write!(f, "[{}]", foo)
    }
}

/// Possible stream types.
#[derive(Debug,Clone,Copy)]
#[allow(dead_code)]
pub enum StreamType {
    /// video stream
    Video,
    /// audio stream
    Audio,
    /// subtitles
    Subtitles,
    /// any data stream (or might be an unrecognized audio/video stream)
    Data,
    /// nonexistent stream
    None,
}

impl fmt::Display for StreamType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            StreamType::Video => write!(f, "Video"),
            StreamType::Audio => write!(f, "Audio"),
            StreamType::Subtitles => write!(f, "Subtitles"),
            StreamType::Data => write!(f, "Data"),
            StreamType::None => write!(f, "-"),
        }
    }
}

#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    media_type: StreamType,
    id: u32,
    num: usize,
    info: NACodecInfoRef,
    tb_num: u32,
    tb_den: u32,
}

pub type NAStreamRef = Arc<NAStream>;

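/// Reduces a time base fraction to its lowest terms by dividing both parts by
/// their greatest common divisor; for example, `reduce_timebase(2, 48000)`
/// yields `(1, 24000)`.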
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    let mut a = tb_num;
    let mut b = tb_den;

    while a != b {
        if a > b { a -= b; }
        else if b > a { b -= a; }
    }

    (tb_num / a, tb_den / a)
}

impl NAStream {
    pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
    }
    pub fn get_id(&self) -> u32 { self.id }
    pub fn get_num(&self) -> usize { self.num }
    pub fn set_num(&mut self, num: usize) { self.num = num; }
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
    pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        self.tb_num = n;
        self.tb_den = d;
    }
    pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
}

impl fmt::Display for NAStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
    }
}

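/// A demuxed packet: a reference to the originating stream, timestamps, the
/// keyframe flag and the raw compressed data.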
#[allow(dead_code)]
pub struct NAPacket {
    stream: NAStreamRef,
    ts: NATimeInfo,
    buffer: NABufferRef<Vec<u8>>,
    keyframe: bool,
//    options: HashMap<String, NAValue<'a>>,
}

impl NAPacket {
    pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
//        let mut vec: Vec<u8> = Vec::new();
//        vec.resize(size, 0);
        NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) }
    }
    pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    pub fn is_keyframe(&self) -> bool { self.keyframe }
    pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
}

impl Drop for NAPacket {
    fn drop(&mut self) {}
}

impl fmt::Display for NAPacket {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut foo = format!("[pkt for {} size {}", self.stream, self.buffer.len());
        if let Some(pts) = self.ts.pts { foo = format!("{} pts {}", foo, pts); }
        if let Some(dts) = self.ts.dts { foo = format!("{} dts {}", foo, dts); }
        if let Some(dur) = self.ts.duration { foo = format!("{} duration {}", foo, dur); }
        if self.keyframe { foo = format!("{} kf", foo); }
        foo += "]";
        write!(f, "{}", foo)
    }
}

pub trait FrameFromPacket {
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame;
    fn fill_timestamps(&mut self, pkt: &NAPacket);
}

impl FrameFromPacket for NAFrame {
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
        NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, HashMap::new(), buf)
    }
    fn fill_timestamps(&mut self, pkt: &NAPacket) {
        self.ts = pkt.get_time_information();
    }
}