core/frame: make fields of NAAudioInfo public
nihav-core/src/frame.rs
use std::cmp::max;
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
pub use crate::formats::*;
pub use crate::refs::*;

#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    pub sample_rate: u32,
    pub channels: u8,
    pub format: NASoniton,
    pub block_len: usize,
}

impl NAAudioInfo {
    pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
        NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
    }
    pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
    pub fn get_channels(&self) -> u8 { self.channels }
    pub fn get_format(&self) -> NASoniton { self.format }
    pub fn get_block_len(&self) -> usize { self.block_len }
}

impl fmt::Display for NAAudioInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
    }
}
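
// Illustrative sketch, not part of the original file: with the NAAudioInfo fields now
// public (see the commit subject), callers can fill the structure in directly instead
// of going through new(). SND_S16_FORMAT is assumed to be one of the soniton constants
// re-exported from crate::formats.
#[cfg(test)]
mod naaudioinfo_sketch {
    use super::*;

    #[test]
    fn build_audio_info_directly() {
        let ainfo = NAAudioInfo { sample_rate: 44100, channels: 2, format: SND_S16_FORMAT, block_len: 1024 };
        assert_eq!(ainfo.get_sample_rate(), 44100);
        assert_eq!(ainfo.get_channels(), 2);
    }
}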

#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    width: usize,
    height: usize,
    flipped: bool,
    format: NAPixelFormaton,
}

impl NAVideoInfo {
    pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
        NAVideoInfo { width: w, height: h, flipped: flip, format: fmt }
    }
    pub fn get_width(&self) -> usize { self.width }
    pub fn get_height(&self) -> usize { self.height }
    pub fn is_flipped(&self) -> bool { self.flipped }
    pub fn get_format(&self) -> NAPixelFormaton { self.format }
    pub fn set_width(&mut self, w: usize) { self.width = w; }
    pub fn set_height(&mut self, h: usize) { self.height = h; }
}

impl fmt::Display for NAVideoInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}x{}", self.width, self.height)
    }
}
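
// Illustrative sketch, not part of the original file: constructing video parameters for
// a 320x240 planar YUV 4:2:0 frame. YUV420_FORMAT is assumed to be one of the pixel
// format constants re-exported from crate::formats.
#[cfg(test)]
mod navideoinfo_sketch {
    use super::*;

    #[test]
    fn build_video_info() {
        let mut vinfo = NAVideoInfo::new(320, 240, false, YUV420_FORMAT);
        assert_eq!(vinfo.get_width(), 320);
        vinfo.set_height(120);
        assert_eq!(vinfo.get_height(), 120);
    }
}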

#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    None,
    Audio(NAAudioInfo),
    Video(NAVideoInfo),
}

impl NACodecTypeInfo {
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NACodecTypeInfo::Video(vinfo) => Some(vinfo),
            _ => None,
        }
    }
    pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
        match *self {
            NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
            _ => None,
        }
    }
    pub fn is_video(&self) -> bool {
        match *self {
            NACodecTypeInfo::Video(_) => true,
            _ => false,
        }
    }
    pub fn is_audio(&self) -> bool {
        match *self {
            NACodecTypeInfo::Audio(_) => true,
            _ => false,
        }
    }
}

impl fmt::Display for NACodecTypeInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let ret = match *self {
            NACodecTypeInfo::None => "".to_string(),
            NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
            NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
        };
        write!(f, "{}", ret)
    }
}

#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    info: NAVideoInfo,
    data: NABufferRef<Vec<T>>,
    offs: Vec<usize>,
    strides: Vec<usize>,
}

impl<T: Clone> NAVideoBuffer<T> {
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    pub fn get_info(&self) -> NAVideoInfo { self.info }
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
        strides.clone_from(&self.strides);
        NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
    }
    pub fn get_stride(&self, idx: usize) -> usize {
        if idx >= self.strides.len() { return 0; }
        self.strides[idx]
    }
    pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
        get_plane_size(&self.info, idx)
    }
    pub fn into_ref(self) -> NABufferRef<Self> {
        NABufferRef::new(self)
    }
}
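
// Illustrative sketch, not part of the original file: planes inside NAVideoBuffer live
// in one flat Vec, so a pixel at (x, y) of plane `idx` sits at
// get_offset(idx) + y * get_stride(idx) + x for 8-bit formats. YUV420_FORMAT is assumed
// to come from crate::formats.
#[cfg(test)]
mod navideobuffer_sketch {
    use super::*;

    #[test]
    fn walk_luma_plane() {
        let vinfo = NAVideoInfo::new(16, 8, false, YUV420_FORMAT);
        let buf = alloc_video_buffer(vinfo, 0).unwrap().get_vbuf().unwrap();
        let off = buf.get_offset(0);
        let stride = buf.get_stride(0);
        let (w, h) = buf.get_dimensions(0);
        let data = buf.get_data();
        let mut sum = 0usize;
        for y in 0..h {
            for x in 0..w {
                sum += data[off + y * stride + x] as usize;
            }
        }
        assert_eq!(sum, 0); // freshly allocated buffers are zero-filled
    }
}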

pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;

#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    info: NAAudioInfo,
    data: NABufferRef<Vec<T>>,
    offs: Vec<usize>,
    stride: usize,
    chmap: NAChannelMap,
    len: usize,
}

impl<T: Clone> NAAudioBuffer<T> {
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    pub fn get_stride(&self) -> usize { self.stride }
    pub fn get_info(&self) -> NAAudioInfo { self.info }
    pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap }
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride }
    }
    pub fn get_length(&self) -> usize { self.len }
}

impl NAAudioBuffer<u8> {
    pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
        let len = data.len();
        NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0 }
    }
}
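
// Illustrative sketch, not part of the original file: for planar audio each channel
// starts at get_offset(ch) and holds get_length() samples, so channel data can be read
// as data[offset .. offset + length]. NAChannelMap::new()/add_channel(), NAChannelType
// and SND_F32P_FORMAT are assumed to come from crate::formats.
#[cfg(test)]
mod naaudiobuffer_sketch {
    use super::*;

    #[test]
    fn read_planar_channels() {
        let ainfo = NAAudioInfo::new(48000, 2, SND_F32P_FORMAT, 1024);
        let mut chmap = NAChannelMap::new();
        chmap.add_channel(NAChannelType::L);
        chmap.add_channel(NAChannelType::R);
        let abuf = alloc_audio_buffer(ainfo, 1024, chmap).unwrap().get_abuf_f32().unwrap();
        for ch in 0..2 {
            let off = abuf.get_offset(ch);
            let plane = &abuf.get_data()[off..off + abuf.get_length()];
            assert!(plane.iter().all(|&s| s == 0.0));
        }
    }
}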

#[derive(Clone)]
pub enum NABufferType {
    Video      (NAVideoBufferRef<u8>),
    Video16    (NAVideoBufferRef<u16>),
    Video32    (NAVideoBufferRef<u32>),
    VideoPacked(NAVideoBufferRef<u8>),
    AudioU8    (NAAudioBuffer<u8>),
    AudioI16   (NAAudioBuffer<i16>),
    AudioI32   (NAAudioBuffer<i32>),
    AudioF32   (NAAudioBuffer<f32>),
    AudioPacked(NAAudioBuffer<u8>),
    Data       (NABufferRef<Vec<u8>>),
    None,
}

impl NABufferType {
    pub fn get_offset(&self, idx: usize) -> usize {
        match *self {
            NABufferType::Video(ref vb) => vb.get_offset(idx),
            NABufferType::Video16(ref vb) => vb.get_offset(idx),
            NABufferType::Video32(ref vb) => vb.get_offset(idx),
            NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
            NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
            _ => 0,
        }
    }
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.get_info()),
            NABufferType::Video16(ref vb) => Some(vb.get_info()),
            NABufferType::Video32(ref vb) => Some(vb.get_info()),
            NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
            _ => None,
        }
    }
    pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.clone()),
            NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
        match *self {
            NABufferType::Video16(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
        match *self {
            NABufferType::Video32(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.get_info()),
            NABufferType::AudioI16(ref ab) => Some(ab.get_info()),
            NABufferType::AudioI32(ref ab) => Some(ab.get_info()),
            NABufferType::AudioF32(ref ab) => Some(ab.get_info()),
            NABufferType::AudioPacked(ref ab) => Some(ab.get_info()),
            _ => None,
        }
    }
    pub fn get_chmap(&self) -> Option<&NAChannelMap> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioI16(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioI32(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioF32(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()),
            _ => None,
        }
    }
    pub fn get_audio_length(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab) => ab.get_length(),
            NABufferType::AudioI16(ref ab) => ab.get_length(),
            NABufferType::AudioI32(ref ab) => ab.get_length(),
            NABufferType::AudioF32(ref ab) => ab.get_length(),
            NABufferType::AudioPacked(ref ab) => ab.get_length(),
            _ => 0,
        }
    }
    pub fn get_audio_stride(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab) => ab.get_stride(),
            NABufferType::AudioI16(ref ab) => ab.get_stride(),
            NABufferType::AudioI32(ref ab) => ab.get_stride(),
            NABufferType::AudioF32(ref ab) => ab.get_stride(),
            NABufferType::AudioPacked(ref ab) => ab.get_stride(),
            _ => 0,
        }
    }
    pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.clone()),
            NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
        match *self {
            NABufferType::AudioI16(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
        match *self {
            NABufferType::AudioI32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
        match *self {
            NABufferType::AudioF32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
}
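
// Illustrative sketch, not part of the original file: a caller usually does not know
// the concrete buffer flavour up front, so it either matches on the enum or uses the
// get_*buf helpers above. YUV420_FORMAT is assumed to come from crate::formats.
#[cfg(test)]
mod nabuffertype_sketch {
    use super::*;

    #[test]
    fn query_buffer_kind() {
        let vinfo = NAVideoInfo::new(64, 48, false, YUV420_FORMAT);
        let buf = alloc_video_buffer(vinfo, 2).unwrap();
        assert!(buf.get_video_info().is_some());
        assert!(buf.get_audio_info().is_none());
        match buf {
            NABufferType::Video(ref vb) => assert_eq!(vb.get_info().get_width(), 64),
            _ => panic!("expected an 8-bit planar video buffer for YUV420"),
        }
    }
}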

const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
pub struct NASimpleVideoFrame<'a, T: Copy> {
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub flip: bool,
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub components: usize,
    pub data: &'a mut [T],
}

impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
    pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
        let vinfo = vbuf.get_info();
        let components = vinfo.format.components as usize;
        if components > NA_SIMPLE_VFRAME_COMPONENTS {
            return None;
        }
        let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        for comp in 0..components {
            let (width, height) = vbuf.get_dimensions(comp);
            w[comp] = width;
            h[comp] = height;
            s[comp] = vbuf.get_stride(comp);
            o[comp] = vbuf.get_offset(comp);
        }
        let flip = vinfo.flipped;
        Some(NASimpleVideoFrame {
            width: w,
            height: h,
            flip,
            stride: s,
            offset: o,
            components,
            data: vbuf.data.as_mut_slice(),
        })
    }
}
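
// Illustrative sketch, not part of the original file: NASimpleVideoFrame flattens the
// per-plane geometry into plain arrays plus one mutable data slice, which is handy
// inside decoders that just want to write pixels. YUV420_FORMAT is assumed to come
// from crate::formats.
#[cfg(test)]
mod nasimpleframe_sketch {
    use super::*;

    #[test]
    fn paint_top_left_luma_pixel() {
        let vinfo = NAVideoInfo::new(32, 16, false, YUV420_FORMAT);
        let mut vbuf = alloc_video_buffer(vinfo, 0).unwrap().get_vbuf().unwrap();
        if let Some(frm) = NASimpleVideoFrame::from_video_buf(&mut vbuf) {
            frm.data[frm.offset[0]] = 255; // top-left pixel of the luma plane
            assert_eq!(frm.stride[0], 32);
        }
    }
}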

#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    TooLargeDimensions,
    FormatError,
}

pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
    let fmt = &vinfo.format;
    let mut new_size: usize = 0;
    let mut offs: Vec<usize> = Vec::new();
    let mut strides: Vec<usize> = Vec::new();

    for i in 0..fmt.get_num_comp() {
        if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
    }

    let align_mod = ((1 << align) as usize) - 1;
    let width  = (vinfo.width  + align_mod) & !align_mod;
    let height = (vinfo.height + align_mod) & !align_mod;
    let mut max_depth = 0;
    let mut all_packed = true;
    let mut all_bytealigned = true;
    for i in 0..fmt.get_num_comp() {
        let ochr = fmt.get_chromaton(i);
        if ochr.is_none() { continue; }
        let chr = ochr.unwrap();
        if !chr.is_packed() {
            all_packed = false;
        } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
            all_bytealigned = false;
        }
        max_depth = max(max_depth, chr.get_depth());
    }
    let unfit_elem_size = match fmt.get_elem_size() {
        2 | 4 => false,
        _ => true,
    };

    //todo semi-packed like NV12
    if fmt.is_paletted() {
        //todo various-sized palettes?
        let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
        let pic_sz = stride.checked_mul(height);
        if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let pal_size = 256 * (fmt.get_elem_size() as usize);
        let new_size = pic_sz.unwrap().checked_add(pal_size);
        if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
        offs.push(0);
        offs.push(stride * height);
        strides.push(stride);
        let data: Vec<u8> = vec![0; new_size.unwrap()];
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::Video(buf.into_ref()))
    } else if !all_packed {
        for i in 0..fmt.get_num_comp() {
            let ochr = fmt.get_chromaton(i);
            if ochr.is_none() { continue; }
            let chr = ochr.unwrap();
            offs.push(new_size as usize);
            let stride = chr.get_linesize(width);
            let cur_h = chr.get_height(height);
            let cur_sz = stride.checked_mul(cur_h);
            if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            let new_sz = new_size.checked_add(cur_sz.unwrap());
            if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            new_size = new_sz.unwrap();
            strides.push(stride);
        }
        if max_depth <= 8 {
            let data: Vec<u8> = vec![0; new_size];
            let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video(buf.into_ref()))
        } else if max_depth <= 16 {
            let data: Vec<u16> = vec![0; new_size];
            let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video16(buf.into_ref()))
        } else {
            let data: Vec<u32> = vec![0; new_size];
            let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video32(buf.into_ref()))
        }
    } else if all_bytealigned || unfit_elem_size {
        let elem_sz = fmt.get_elem_size();
        let line_sz = width.checked_mul(elem_sz as usize);
        if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let new_sz = line_sz.unwrap().checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        let data: Vec<u8> = vec![0; new_size];
        strides.push(line_sz.unwrap());
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::VideoPacked(buf.into_ref()))
    } else {
        let elem_sz = fmt.get_elem_size();
        let new_sz = width.checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        match elem_sz {
            2 => {
                let data: Vec<u16> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video16(buf.into_ref()))
            },
            4 => {
                let data: Vec<u32> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video32(buf.into_ref()))
            },
            _ => unreachable!(),
        }
    }
}
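
// Illustrative sketch, not part of the original file: allocation picks the buffer
// flavour from the pixel format, e.g. 8-bit planar YUV ends up in NABufferType::Video
// while a packed format would come back as Video16/Video32 or VideoPacked.
// YUV420_FORMAT is assumed to come from crate::formats.
#[cfg(test)]
mod alloc_video_sketch {
    use super::*;

    #[test]
    fn allocate_planar_yuv() {
        let vinfo = NAVideoInfo::new(100, 50, false, YUV420_FORMAT);
        // align = 4 rounds the internal plane dimensions up to a multiple of 16
        let buf = alloc_video_buffer(vinfo, 4).unwrap();
        let vbuf = buf.get_vbuf().unwrap();
        assert_eq!(vbuf.get_info().get_width(), 100); // reported size stays unpadded
        assert!(vbuf.get_stride(0) >= 100);
        assert!(vbuf.get_offset(1) > 0); // chroma planes follow the luma plane
    }
}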

#[allow(clippy::collapsible_if)]
pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
    let mut offs: Vec<usize> = Vec::new();
    if ainfo.format.is_planar() || (ainfo.channels == 1 && (ainfo.format.get_bits() % 8) == 0) {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = len.unwrap();
        let stride = nsamples;
        for i in 0..ainfo.channels {
            offs.push((i as usize) * stride);
        }
        if ainfo.format.is_float() {
            if ainfo.format.get_bits() == 32 {
                let data: Vec<f32> = vec![0.0; length];
                let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioF32(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        } else {
            if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
                let data: Vec<u8> = vec![0; length];
                let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioU8(buf))
            } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
                let data: Vec<i16> = vec![0; length];
                let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioI16(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        }
    } else {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = ainfo.format.get_audio_size(len.unwrap() as u64);
        let data: Vec<u8> = vec![0; length];
        let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0 };
        Ok(NABufferType::AudioPacked(buf))
    }
}
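
// Illustrative sketch, not part of the original file: non-planar multi-channel audio
// falls through to the packed branch, which hands back raw bytes sized via
// get_audio_size() and leaves the stride at 0. NAChannelMap::new()/add_channel(),
// NAChannelType and SND_S16_FORMAT are assumed to come from crate::formats.
#[cfg(test)]
mod alloc_audio_sketch {
    use super::*;

    #[test]
    fn allocate_interleaved_s16() {
        let ainfo = NAAudioInfo::new(44100, 2, SND_S16_FORMAT, 0);
        let mut chmap = NAChannelMap::new();
        chmap.add_channel(NAChannelType::L);
        chmap.add_channel(NAChannelType::R);
        let buf = alloc_audio_buffer(ainfo, 512, chmap).unwrap();
        assert_eq!(buf.get_audio_length(), 512);
        assert_eq!(buf.get_audio_stride(), 0);
        assert!(buf.get_abuf_u8().is_some()); // packed audio is exposed as bytes
    }
}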

pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
    let data: Vec<u8> = vec![0; size];
    let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
    Ok(NABufferType::Data(buf))
}

pub fn copy_buffer(buf: NABufferType) -> NABufferType {
    buf.clone()
}

pub struct NAVideoBufferPool<T:Copy> {
    pool: Vec<NAVideoBufferRef<T>>,
    max_len: usize,
    add_len: usize,
}

impl<T:Copy> NAVideoBufferPool<T> {
    pub fn new(max_len: usize) -> Self {
        Self {
            pool: Vec::with_capacity(max_len),
            max_len,
            add_len: 0,
        }
    }
    pub fn set_dec_bufs(&mut self, add_len: usize) {
        self.add_len = add_len;
    }
    pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
        for e in self.pool.iter() {
            if e.get_num_refs() == 1 {
                return Some(e.clone());
            }
        }
        None
    }
    pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
        let mut dbuf = self.get_free()?;
        dbuf.data.copy_from_slice(&rbuf.data);
        Some(dbuf)
    }
    pub fn reset(&mut self) {
        self.pool.truncate(0);
    }
}

impl NAVideoBufferPool<u8> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video(buf) = vbuf {
                self.pool.push(buf);
            } else if let NABufferType::VideoPacked(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

impl NAVideoBufferPool<u16> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video16(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

impl NAVideoBufferPool<u32> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video32(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}
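
// Illustrative sketch, not part of the original file: a decoder typically preallocates
// its frames once and then asks the pool for a buffer that nobody else holds (reference
// count of one) before decoding into it. YUV420_FORMAT is assumed to come from
// crate::formats.
#[cfg(test)]
mod bufferpool_sketch {
    use super::*;

    #[test]
    fn reuse_pooled_frames() {
        let vinfo = NAVideoInfo::new(64, 64, false, YUV420_FORMAT);
        let mut pool: NAVideoBufferPool<u8> = NAVideoBufferPool::new(2);
        pool.prealloc_video(vinfo, 2).unwrap();
        let frame = pool.get_free().expect("pool should have an unused buffer");
        // while `frame` is held its reference count is 2, so it is not handed out again
        drop(frame);
        assert!(pool.get_free().is_some());
    }
}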

#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    name: &'static str,
    properties: NACodecTypeInfo,
    extradata: Option<Arc<Vec<u8>>>,
}

pub type NACodecInfoRef = Arc<NACodecInfo>;

impl NACodecInfo {
    pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
        let extradata = match edata {
            None => None,
            Some(vec) => Some(Arc::new(vec)),
        };
        NACodecInfo { name, properties: p, extradata }
    }
    pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
        NACodecInfo { name, properties: p, extradata: edata }
    }
    pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
    pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
    pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
        if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
        None
    }
    pub fn get_name(&self) -> &'static str { self.name }
    pub fn is_video(&self) -> bool {
        if let NACodecTypeInfo::Video(_) = self.properties { return true; }
        false
    }
    pub fn is_audio(&self) -> bool {
        if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
        false
    }
    pub fn new_dummy() -> Arc<Self> {
        Arc::new(DUMMY_CODEC_INFO)
    }
    pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
        Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
    }
}

impl Default for NACodecInfo {
    fn default() -> Self { DUMMY_CODEC_INFO }
}

impl fmt::Display for NACodecInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let edata = match self.extradata.clone() {
            None => "no extradata".to_string(),
            Some(v) => format!("{} byte(s) of extradata", v.len()),
        };
        write!(f, "{}: {} {}", self.name, self.properties, edata)
    }
}

pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };

#[derive(Debug,Clone)]
pub enum NAValue {
    None,
    Int(i32),
    Long(i64),
    String(String),
    Data(Arc<Vec<u8>>),
}

#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    I,
    P,
    B,
    Skip,
    Other,
}

impl fmt::Display for FrameType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            FrameType::I => write!(f, "I"),
            FrameType::P => write!(f, "P"),
            FrameType::B => write!(f, "B"),
            FrameType::Skip => write!(f, "skip"),
            FrameType::Other => write!(f, "x"),
        }
    }
}

#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    pts: Option<u64>,
    dts: Option<u64>,
    duration: Option<u64>,
    tb_num: u32,
    tb_den: u32,
}

impl NATimeInfo {
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }

    pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        // inverse of ts_to_time() below: time is expressed in 1/base seconds and one
        // timestamp tick lasts tb_num/tb_den seconds, so ts = time * tb_den / (base * tb_num)
        let tmp = time.checked_mul(tb_den);
        if let Some(tmp) = tmp {
            tmp / base / tb_num
        } else {
            // on overflow drop precision first and scale the coarse value instead
            let coarse = time / base;
            let tmp = coarse.checked_mul(tb_den);
            if let Some(tmp) = tmp {
                tmp / tb_num
            } else {
                (coarse / tb_num) * tb_den
            }
        }
    }
    pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        let tmp = ts.checked_mul(base);
        if let Some(tmp) = tmp {
            let tmp2 = tmp.checked_mul(tb_num);
            if let Some(tmp2) = tmp2 {
                tmp2 / tb_den
            } else {
                (tmp / tb_den) * tb_num
            }
        } else {
            let tmp = ts.checked_mul(tb_num);
            if let Some(tmp) = tmp {
                (tmp / tb_den) * base
            } else {
                (ts / tb_den) * base * tb_num
            }
        }
    }
}
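
// Illustrative sketch, not part of the original file: with a 1/25 timebase a timestamp
// of 50 ticks corresponds to two seconds, i.e. 2000 in a millisecond scale
// (ts * base * tb_num / tb_den = 50 * 1000 * 1 / 25), and time_to_ts() maps it back.
#[cfg(test)]
mod natimeinfo_sketch {
    use super::*;

    #[test]
    fn timestamp_scaling() {
        assert_eq!(NATimeInfo::ts_to_time(50, 1000, 1, 25), 2000);
        assert_eq!(NATimeInfo::time_to_ts(2000, 1000, 1, 25), 50);
    }
}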

#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    ts: NATimeInfo,
    id: i64,
    buffer: NABufferType,
    info: NACodecInfoRef,
    ftype: FrameType,
    key: bool,
    options: HashMap<String, NAValue>,
}

pub type NAFrameRef = Arc<NAFrame>;

fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
    let chromaton = info.get_format().get_chromaton(idx);
    if chromaton.is_none() { return (0, 0); }
    let (hs, vs) = chromaton.unwrap().get_subsampling();
    let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
    let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
    (w, h)
}

impl NAFrame {
    pub fn new(ts: NATimeInfo,
               ftype: FrameType,
               keyframe: bool,
               info: NACodecInfoRef,
               options: HashMap<String, NAValue>,
               buffer: NABufferType) -> Self {
        NAFrame { ts, id: 0, buffer, info, ftype, key: keyframe, options }
    }
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    pub fn get_frame_type(&self) -> FrameType { self.ftype }
    pub fn is_keyframe(&self) -> bool { self.key }
    pub fn set_frame_type(&mut self, ftype: FrameType) { self.ftype = ftype; }
    pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    pub fn get_id(&self) -> i64 { self.id }
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
    pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
    pub fn set_id(&mut self, id: i64) { self.id = id; }
    pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }

    pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }

    pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
}

impl fmt::Display for NAFrame {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut ostr = format!("frame type {}", self.ftype);
        if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
        if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
        if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
        if self.key { ostr = format!("{} kf", ostr); }
        write!(f, "[{}]", ostr)
    }
}

/// Possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// video stream
    Video,
    /// audio stream
    Audio,
    /// subtitles
    Subtitles,
    /// any data stream (or might be an unrecognized audio/video stream)
    Data,
    /// nonexistent stream
    None,
}

impl fmt::Display for StreamType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            StreamType::Video => write!(f, "Video"),
            StreamType::Audio => write!(f, "Audio"),
            StreamType::Subtitles => write!(f, "Subtitles"),
            StreamType::Data => write!(f, "Data"),
            StreamType::None => write!(f, "-"),
        }
    }
}

#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    media_type: StreamType,
    id: u32,
    num: usize,
    info: NACodecInfoRef,
    tb_num: u32,
    tb_den: u32,
}

pub type NAStreamRef = Arc<NAStream>;

pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    let mut a = tb_num;
    let mut b = tb_den;

    while a != b {
        if a > b { a -= b; }
        else if b > a { b -= a; }
    }

    (tb_num / a, tb_den / a)
}
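
// Illustrative sketch, not part of the original file: reduce_timebase() divides both
// sides by their greatest common divisor (computed by repeated subtraction above),
// e.g. 1000/30000 becomes 1/30 and 6/4 becomes 3/2.
#[cfg(test)]
mod timebase_sketch {
    use super::*;

    #[test]
    fn reduce_examples() {
        assert_eq!(reduce_timebase(1000, 30000), (1, 30));
        assert_eq!(reduce_timebase(6, 4), (3, 2));
        assert_eq!(reduce_timebase(0, 90000), (0, 90000)); // zero numerator is left alone
    }
}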

impl NAStream {
    pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
    }
    pub fn get_id(&self) -> u32 { self.id }
    pub fn get_media_type(&self) -> StreamType { self.media_type }
    pub fn get_num(&self) -> usize { self.num }
    pub fn set_num(&mut self, num: usize) { self.num = num; }
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
    pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        self.tb_num = n;
        self.tb_den = d;
    }
    pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
}

impl fmt::Display for NAStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
    }
}

#[allow(dead_code)]
pub struct NAPacket {
    stream: NAStreamRef,
    ts: NATimeInfo,
    buffer: NABufferRef<Vec<u8>>,
    keyframe: bool,
//    options: HashMap<String, NAValue<'a>>,
}

impl NAPacket {
    pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
//        let mut vec: Vec<u8> = Vec::new();
//        vec.resize(size, 0);
        NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) }
    }
    pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    pub fn is_keyframe(&self) -> bool { self.keyframe }
    pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
}

impl Drop for NAPacket {
    fn drop(&mut self) {}
}

impl fmt::Display for NAPacket {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len());
        if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
        if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
        if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
        if self.keyframe { ostr = format!("{} kf", ostr); }
        ostr += "]";
        write!(f, "{}", ostr)
    }
}

pub trait FrameFromPacket {
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame;
    fn fill_timestamps(&mut self, pkt: &NAPacket);
}

impl FrameFromPacket for NAFrame {
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
        NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, HashMap::new(), buf)
    }
    fn fill_timestamps(&mut self, pkt: &NAPacket) {
        self.ts = pkt.get_time_information();
    }
}
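
// Illustrative sketch, not part of the original file: tying the pieces together the way
// a demuxer/decoder pair would — wrap coded bytes into an NAPacket for a stream, then
// seed an NAFrame from that packet so it inherits the packet timing.
#[cfg(test)]
mod packet_frame_sketch {
    use super::*;

    #[test]
    fn frame_inherits_packet_timing() {
        let cinfo = NACodecInfo::new("testcodec", NACodecTypeInfo::None, None);
        let stream = NAStream::new(StreamType::Video, 0, cinfo.clone(), 1, 25).into_ref();
        let ts = NATimeInfo::new(Some(42), None, Some(1), 1, 25);
        let pkt = NAPacket::new(stream, ts, true, vec![0u8; 16]);
        let frame = NAFrame::new_from_pkt(&pkt, cinfo.into_ref(), NABufferType::None);
        assert_eq!(frame.get_pts(), Some(42));
        assert!(frame.is_keyframe());
    }
}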