core/frame: introduce stride for planar audio buffers
nihav.git: nihav-core/src/frame.rs
use std::cmp::max;
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
pub use crate::formats::*;
pub use crate::refs::*;

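/// Audio stream properties: sample rate, channel count, sample format and codec block length.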
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    sample_rate: u32,
    channels: u8,
    format: NASoniton,
    block_len: usize,
}

impl NAAudioInfo {
    pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
        NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
    }
    pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
    pub fn get_channels(&self) -> u8 { self.channels }
    pub fn get_format(&self) -> NASoniton { self.format }
    pub fn get_block_len(&self) -> usize { self.block_len }
}

impl fmt::Display for NAAudioInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
    }
}

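/// Video stream properties: picture dimensions, orientation and pixel format.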
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    width: usize,
    height: usize,
    flipped: bool,
    format: NAPixelFormaton,
}

impl NAVideoInfo {
    pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
        NAVideoInfo { width: w, height: h, flipped: flip, format: fmt }
    }
    pub fn get_width(&self) -> usize { self.width }
    pub fn get_height(&self) -> usize { self.height }
    pub fn is_flipped(&self) -> bool { self.flipped }
    pub fn get_format(&self) -> NAPixelFormaton { self.format }
    pub fn set_width(&mut self, w: usize) { self.width = w; }
    pub fn set_height(&mut self, h: usize) { self.height = h; }
}

impl fmt::Display for NAVideoInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}x{}", self.width, self.height)
    }
}

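/// Codec-type-dependent properties: audio parameters, video parameters, or none.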
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    None,
    Audio(NAAudioInfo),
    Video(NAVideoInfo),
}

impl NACodecTypeInfo {
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NACodecTypeInfo::Video(vinfo) => Some(vinfo),
            _ => None,
        }
    }
    pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
        match *self {
            NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
            _ => None,
        }
    }
    pub fn is_video(&self) -> bool {
        match *self {
            NACodecTypeInfo::Video(_) => true,
            _ => false,
        }
    }
    pub fn is_audio(&self) -> bool {
        match *self {
            NACodecTypeInfo::Audio(_) => true,
            _ => false,
        }
    }
}

impl fmt::Display for NACodecTypeInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let ret = match *self {
            NACodecTypeInfo::None => "".to_string(),
            NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
            NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
        };
        write!(f, "{}", ret)
    }
}

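/// Decoded video frame storage.
///
/// All planes share a single allocation; per-plane start offsets and line
/// strides are kept alongside the data.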
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    info: NAVideoInfo,
    data: NABufferRef<Vec<T>>,
    offs: Vec<usize>,
    strides: Vec<usize>,
}

impl<T: Clone> NAVideoBuffer<T> {
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    pub fn get_info(&self) -> NAVideoInfo { self.info }
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
        strides.clone_from(&self.strides);
        NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
    }
    pub fn get_stride(&self, idx: usize) -> usize {
        if idx >= self.strides.len() { return 0; }
        self.strides[idx]
    }
    pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
        get_plane_size(&self.info, idx)
    }
    pub fn into_ref(self) -> NABufferRef<Self> {
        NABufferRef::new(self)
    }
}

pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;

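/// Decoded audio frame storage.
///
/// For planar audio each channel gets its own region of `data`: channel `i`
/// starts at `offs[i]` and `stride` is the distance between the starts of
/// consecutive channel planes (zero for packed/interleaved audio).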
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    info: NAAudioInfo,
    data: NABufferRef<Vec<T>>,
    offs: Vec<usize>,
    stride: usize,
    chmap: NAChannelMap,
    len: usize,
}

impl<T: Clone> NAAudioBuffer<T> {
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    pub fn get_stride(&self) -> usize { self.stride }
    pub fn get_info(&self) -> NAAudioInfo { self.info }
    pub fn get_chmap(&self) -> NAChannelMap { self.chmap.clone() }
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap(), len: self.len, stride: self.stride }
    }
    pub fn get_length(&self) -> usize { self.len }
}

impl NAAudioBuffer<u8> {
    pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
        let len = data.len();
        NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0 }
    }
}

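/// Decoded data variants: video with 8-, 16- or 32-bit samples, packed video,
/// audio in several sample formats, opaque data, or no buffer at all.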
#[derive(Clone)]
pub enum NABufferType {
    Video      (NAVideoBufferRef<u8>),
    Video16    (NAVideoBufferRef<u16>),
    Video32    (NAVideoBufferRef<u32>),
    VideoPacked(NAVideoBufferRef<u8>),
    AudioU8    (NAAudioBuffer<u8>),
    AudioI16   (NAAudioBuffer<i16>),
    AudioI32   (NAAudioBuffer<i32>),
    AudioF32   (NAAudioBuffer<f32>),
    AudioPacked(NAAudioBuffer<u8>),
    Data       (NABufferRef<Vec<u8>>),
    None,
}

impl NABufferType {
    pub fn get_offset(&self, idx: usize) -> usize {
        match *self {
            NABufferType::Video(ref vb) => vb.get_offset(idx),
            NABufferType::Video16(ref vb) => vb.get_offset(idx),
            NABufferType::Video32(ref vb) => vb.get_offset(idx),
            NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
            NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
            _ => 0,
        }
    }
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.get_info()),
            NABufferType::Video16(ref vb) => Some(vb.get_info()),
            NABufferType::Video32(ref vb) => Some(vb.get_info()),
            NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
            _ => None,
        }
    }
    pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.clone()),
            NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
        match *self {
            NABufferType::Video16(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
        match *self {
            NABufferType::Video32(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.clone()),
            NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
        match *self {
            NABufferType::AudioI16(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
        match *self {
            NABufferType::AudioI32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
        match *self {
            NABufferType::AudioF32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
}

const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
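/// A simplified, mutable view of a video buffer: per-component dimensions,
/// strides and offsets exposed as plain arrays along with the raw data slice.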
pub struct NASimpleVideoFrame<'a, T: Copy> {
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub flip: bool,
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    pub components: usize,
    pub data: &'a mut [T],
}

impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
    pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
        let vinfo = vbuf.get_info();
        let components = vinfo.format.components as usize;
        if components > NA_SIMPLE_VFRAME_COMPONENTS {
            return None;
        }
        let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        for comp in 0..components {
            let (width, height) = vbuf.get_dimensions(comp);
            w[comp] = width;
            h[comp] = height;
            s[comp] = vbuf.get_stride(comp);
            o[comp] = vbuf.get_offset(comp);
        }
        let flip = vinfo.flipped;
        Some(NASimpleVideoFrame {
            width: w,
            height: h,
            flip,
            stride: s,
            offset: o,
            components,
            data: vbuf.data.as_mut_slice(),
        })
    }
}

#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    TooLargeDimensions,
    FormatError,
}

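/// Allocates a video buffer for the given picture parameters.
///
/// `align` is a power of two: width and height are rounded up to a multiple of
/// `1 << align` before plane sizes are computed. Depending on the pixel format
/// the data is stored as 8-, 16- or 32-bit samples (or as packed bytes) and the
/// matching `NABufferType` variant is returned.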
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
    let fmt = &vinfo.format;
    let mut new_size: usize = 0;
    let mut offs: Vec<usize> = Vec::new();
    let mut strides: Vec<usize> = Vec::new();

    for i in 0..fmt.get_num_comp() {
        if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
    }

    let align_mod = ((1 << align) as usize) - 1;
    let width = ((vinfo.width as usize) + align_mod) & !align_mod;
    let height = ((vinfo.height as usize) + align_mod) & !align_mod;
    let mut max_depth = 0;
    let mut all_packed = true;
    let mut all_bytealigned = true;
    for i in 0..fmt.get_num_comp() {
        let ochr = fmt.get_chromaton(i);
        if ochr.is_none() { continue; }
        let chr = ochr.unwrap();
        if !chr.is_packed() {
            all_packed = false;
        } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
            all_bytealigned = false;
        }
        max_depth = max(max_depth, chr.get_depth());
    }
    let unfit_elem_size = match fmt.get_elem_size() {
        2 | 4 => false,
        _ => true,
    };

    //todo semi-packed like NV12
    if fmt.is_paletted() {
        //todo various-sized palettes?
        let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
        let pic_sz = stride.checked_mul(height);
        if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let pal_size = 256 * (fmt.get_elem_size() as usize);
        let new_size = pic_sz.unwrap().checked_add(pal_size);
        if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
        offs.push(0);
        offs.push(stride * height);
        strides.push(stride);
        let data: Vec<u8> = vec![0; new_size.unwrap()];
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::Video(buf.into_ref()))
    } else if !all_packed {
        for i in 0..fmt.get_num_comp() {
            let ochr = fmt.get_chromaton(i);
            if ochr.is_none() { continue; }
            let chr = ochr.unwrap();
            offs.push(new_size as usize);
            let stride = chr.get_linesize(width);
            let cur_h = chr.get_height(height);
            let cur_sz = stride.checked_mul(cur_h);
            if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            let new_sz = new_size.checked_add(cur_sz.unwrap());
            if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            new_size = new_sz.unwrap();
            strides.push(stride);
        }
        if max_depth <= 8 {
            let data: Vec<u8> = vec![0; new_size];
            let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video(buf.into_ref()))
        } else if max_depth <= 16 {
            let data: Vec<u16> = vec![0; new_size];
            let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video16(buf.into_ref()))
        } else {
            let data: Vec<u32> = vec![0; new_size];
            let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video32(buf.into_ref()))
        }
    } else if all_bytealigned || unfit_elem_size {
        let elem_sz = fmt.get_elem_size();
        let line_sz = width.checked_mul(elem_sz as usize);
        if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let new_sz = line_sz.unwrap().checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        let data: Vec<u8> = vec![0; new_size];
        strides.push(line_sz.unwrap());
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::VideoPacked(buf.into_ref()))
    } else {
        let elem_sz = fmt.get_elem_size();
        let new_sz = width.checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        match elem_sz {
            2 => {
                let data: Vec<u16> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video16(buf.into_ref()))
            },
            4 => {
                let data: Vec<u32> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video32(buf.into_ref()))
            },
            _ => unreachable!(),
        }
    }
}

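/// Allocates an audio buffer for `nsamples` samples per channel.
///
/// Planar formats (and byte-aligned mono) get one plane per channel laid out
/// back to back: channel `i` starts at `get_offset(i)` and the distance between
/// channel starts is `get_stride()`, equal to `nsamples`. Other formats fall
/// back to a packed byte buffer with a stride of zero.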
#[allow(clippy::collapsible_if)]
pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
    let mut offs: Vec<usize> = Vec::new();
    if ainfo.format.is_planar() || (ainfo.channels == 1 && (ainfo.format.get_bits() % 8) == 0) {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = len.unwrap();
        let stride = nsamples;
        for i in 0..ainfo.channels {
            offs.push((i as usize) * stride);
        }
        if ainfo.format.is_float() {
            if ainfo.format.get_bits() == 32 {
                let data: Vec<f32> = vec![0.0; length];
                let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioF32(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        } else {
            if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
                let data: Vec<u8> = vec![0; length];
                let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioU8(buf))
            } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
                let data: Vec<i16> = vec![0; length];
                let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioI16(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        }
    } else {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = ainfo.format.get_audio_size(len.unwrap() as u64);
        let data: Vec<u8> = vec![0; length];
        let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0 };
        Ok(NABufferType::AudioPacked(buf))
    }
}
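
// A minimal sketch of how a consumer might walk a planar buffer produced by
// the allocator above (`abuf` and `process_channel` are illustrative names,
// not part of this crate):
//
//     let data     = abuf.get_data();
//     let len      = abuf.get_length();
//     let channels = abuf.get_info().get_channels() as usize;
//     for ch in 0..channels {
//         let off = abuf.get_offset(ch); // equivalently ch * abuf.get_stride()
//         process_channel(ch, &data[off..][..len]);
//     }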

pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
    let data: Vec<u8> = vec![0; size];
    let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
    Ok(NABufferType::Data(buf))
}

pub fn copy_buffer(buf: NABufferType) -> NABufferType {
    buf.clone()
}

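/// A pool of reusable video buffers for decoders that keep reference frames.
///
/// `get_free()` only hands out a pooled buffer when nothing else holds a
/// reference to it, so frames still in use are never overwritten.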
pub struct NAVideoBufferPool<T:Copy> {
    pool: Vec<NAVideoBufferRef<T>>,
    max_len: usize,
    add_len: usize,
}

impl<T:Copy> NAVideoBufferPool<T> {
    pub fn new(max_len: usize) -> Self {
        Self {
            pool: Vec::with_capacity(max_len),
            max_len,
            add_len: 0,
        }
    }
    pub fn set_dec_bufs(&mut self, add_len: usize) {
        self.add_len = add_len;
    }
    pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
        for e in self.pool.iter() {
            if e.get_num_refs() == 1 {
                return Some(e.clone());
            }
        }
        None
    }
    pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
        let mut dbuf = self.get_free()?;
        dbuf.data.copy_from_slice(&rbuf.data);
        Some(dbuf)
    }
    pub fn reset(&mut self) {
        self.pool.truncate(0);
    }
}

impl NAVideoBufferPool<u8> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video(buf) = vbuf {
                self.pool.push(buf);
            } else if let NABufferType::VideoPacked(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

impl NAVideoBufferPool<u16> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video16(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

impl NAVideoBufferPool<u32> {
    pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
        let nbufs = self.max_len + self.add_len - self.pool.len();
        for _ in 0..nbufs {
            let vbuf = alloc_video_buffer(vinfo, align)?;
            if let NABufferType::Video32(buf) = vbuf {
                self.pool.push(buf);
            } else {
                return Err(AllocatorError::FormatError);
            }
        }
        Ok(())
    }
}

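/// Codec description: name, type-specific properties and optional extradata.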
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    name: &'static str,
    properties: NACodecTypeInfo,
    extradata: Option<Arc<Vec<u8>>>,
}

pub type NACodecInfoRef = Arc<NACodecInfo>;

impl NACodecInfo {
    pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
        let extradata = match edata {
            None => None,
            Some(vec) => Some(Arc::new(vec)),
        };
        NACodecInfo { name, properties: p, extradata }
    }
    pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
        NACodecInfo { name, properties: p, extradata: edata }
    }
    pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
    pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
    pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
        if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
        None
    }
    pub fn get_name(&self) -> &'static str { self.name }
    pub fn is_video(&self) -> bool {
        if let NACodecTypeInfo::Video(_) = self.properties { return true; }
        false
    }
    pub fn is_audio(&self) -> bool {
        if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
        false
    }
    pub fn new_dummy() -> Arc<Self> {
        Arc::new(DUMMY_CODEC_INFO)
    }
    pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
        Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
    }
}

impl Default for NACodecInfo {
    fn default() -> Self { DUMMY_CODEC_INFO }
}

impl fmt::Display for NACodecInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let edata = match self.extradata.clone() {
            None => "no extradata".to_string(),
            Some(v) => format!("{} byte(s) of extradata", v.len()),
        };
        write!(f, "{}: {} {}", self.name, self.properties, edata)
    }
}

pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };

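/// A typed value (nothing, integer, string or binary data) as used in the frame options map.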
#[derive(Debug,Clone)]
pub enum NAValue {
    None,
    Int(i32),
    Long(i64),
    String(String),
    Data(Arc<Vec<u8>>),
}

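/// Picture coding type: intra, predicted, bidirectionally predicted, skipped or other.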
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    I,
    P,
    B,
    Skip,
    Other,
}

impl fmt::Display for FrameType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            FrameType::I => write!(f, "I"),
            FrameType::P => write!(f, "P"),
            FrameType::B => write!(f, "B"),
            FrameType::Skip => write!(f, "skip"),
            FrameType::Other => write!(f, "x"),
        }
    }
}

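/// Timing information for frames and packets: PTS, DTS, duration and the timebase they are expressed in.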
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    pts: Option<u64>,
    dts: Option<u64>,
    duration: Option<u64>,
    tb_num: u32,
    tb_den: u32,
}

impl NATimeInfo {
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }

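    /// Converts a time in 1/`base` second units into a timestamp in the
    /// `tb_num`/`tb_den` timebase (the inverse of `ts_to_time` below);
    /// for example, 1000 ms (`base` = 1000) in a 1/25 timebase gives 25.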
    pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        let tmp = time.checked_mul(tb_den);
        if let Some(tmp) = tmp {
            tmp / base / tb_num
        } else {
            // multiplication overflowed, trade precision by dividing by base first
            let coarse = time / base;
            let tmp = coarse.checked_mul(tb_den);
            if let Some(tmp) = tmp {
                tmp / tb_num
            } else {
                (coarse / tb_num) * tb_den
            }
        }
    }
    pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        let tmp = ts.checked_mul(base);
        if let Some(tmp) = tmp {
            let tmp2 = tmp.checked_mul(tb_num);
            if let Some(tmp2) = tmp2 {
                tmp2 / tb_den
            } else {
                (tmp / tb_den) * tb_num
            }
        } else {
            let tmp = ts.checked_mul(tb_num);
            if let Some(tmp) = tmp {
                (tmp / tb_den) * base
            } else {
                (ts / tb_den) * base * tb_num
            }
        }
    }
}

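/// A decoded frame: timing information, frame type, keyframe flag, the decoded
/// buffer, codec information and per-frame options.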
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    ts: NATimeInfo,
    id: i64,
    buffer: NABufferType,
    info: NACodecInfoRef,
    ftype: FrameType,
    key: bool,
    options: HashMap<String, NAValue>,
}

pub type NAFrameRef = Arc<NAFrame>;

fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
    let chromaton = info.get_format().get_chromaton(idx);
    if chromaton.is_none() { return (0, 0); }
    let (hs, vs) = chromaton.unwrap().get_subsampling();
    let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
    let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
    (w, h)
}

impl NAFrame {
    pub fn new(ts: NATimeInfo,
               ftype: FrameType,
               keyframe: bool,
               info: NACodecInfoRef,
               options: HashMap<String, NAValue>,
               buffer: NABufferType) -> Self {
        NAFrame { ts, id: 0, buffer, info, ftype, key: keyframe, options }
    }
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    pub fn get_frame_type(&self) -> FrameType { self.ftype }
    pub fn is_keyframe(&self) -> bool { self.key }
    pub fn set_frame_type(&mut self, ftype: FrameType) { self.ftype = ftype; }
    pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    pub fn get_id(&self) -> i64 { self.id }
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
    pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
    pub fn set_id(&mut self, id: i64) { self.id = id; }
    pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }

    pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }

    pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
}

impl fmt::Display for NAFrame {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut ostr = format!("frame type {}", self.ftype);
        if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
        if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
        if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
        if self.key { ostr = format!("{} kf", ostr); }
        write!(f, "[{}]", ostr)
    }
}

/// Possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// video stream
    Video,
    /// audio stream
    Audio,
    /// subtitles
    Subtitles,
    /// any data stream (or might be an unrecognized audio/video stream)
    Data,
    /// nonexistent stream
    None,
}

impl fmt::Display for StreamType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            StreamType::Video => write!(f, "Video"),
            StreamType::Audio => write!(f, "Audio"),
            StreamType::Subtitles => write!(f, "Subtitles"),
            StreamType::Data => write!(f, "Data"),
            StreamType::None => write!(f, "-"),
        }
    }
}

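/// A single elementary stream inside a container: stream type, container-level
/// ID, stream index, codec information and timebase.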
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    media_type: StreamType,
    id: u32,
    num: usize,
    info: NACodecInfoRef,
    tb_num: u32,
    tb_den: u32,
}

pub type NAStreamRef = Arc<NAStream>;

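/// Reduces a timebase fraction to its lowest terms (subtraction-based GCD),
/// e.g. 1000/30000 becomes 1/30.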
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    let mut a = tb_num;
    let mut b = tb_den;

    while a != b {
        if a > b { a -= b; }
        else if b > a { b -= a; }
    }

    (tb_num / a, tb_den / a)
}

impl NAStream {
    pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
    }
    pub fn get_id(&self) -> u32 { self.id }
    pub fn get_media_type(&self) -> StreamType { self.media_type }
    pub fn get_num(&self) -> usize { self.num }
    pub fn set_num(&mut self, num: usize) { self.num = num; }
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
    pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        self.tb_num = n;
        self.tb_den = d;
    }
    pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
}

impl fmt::Display for NAStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
    }
}

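/// A demuxed packet: compressed data for one stream plus timestamps and a keyframe flag.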
#[allow(dead_code)]
pub struct NAPacket {
    stream: NAStreamRef,
    ts: NATimeInfo,
    buffer: NABufferRef<Vec<u8>>,
    keyframe: bool,
//    options: HashMap<String, NAValue<'a>>,
}

impl NAPacket {
    pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
//        let mut vec: Vec<u8> = Vec::new();
//        vec.resize(size, 0);
        NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) }
    }
    pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    pub fn is_keyframe(&self) -> bool { self.keyframe }
    pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
}

impl Drop for NAPacket {
    fn drop(&mut self) {}
}

impl fmt::Display for NAPacket {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len());
        if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
        if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
        if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
        if self.keyframe { ostr = format!("{} kf", ostr); }
        ostr += "]";
        write!(f, "{}", ostr)
    }
}

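/// Helper trait for constructing frames from packets and copying packet timestamps into them.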
pub trait FrameFromPacket {
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame;
    fn fill_timestamps(&mut self, pkt: &NAPacket);
}

impl FrameFromPacket for NAFrame {
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
        NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, HashMap::new(), buf)
    }
    fn fill_timestamps(&mut self, pkt: &NAPacket) {
        self.ts = pkt.get_time_information();
    }
}