support interleaved audio properly
[nihav.git] / nihav-core / src / frame.rs
1 //! Packets and decoded frames functionality.
2 use std::cmp::max;
3 //use std::collections::HashMap;
4 use std::fmt;
5 use std::sync::Arc;
6 pub use crate::formats::*;
7 pub use crate::refs::*;
8
/// Audio stream information.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    /// Sample rate in Hz.
    pub sample_rate: u32,
    /// Number of channels.
    pub channels: u8,
    /// Audio sample format.
    pub format: NASoniton,
    /// Length of one audio block in samples.
    pub block_len: usize,
}
22
23 impl NAAudioInfo {
24 /// Constructs a new `NAAudioInfo` instance.
25 pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
26 NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
27 }
28 /// Returns audio sample rate.
29 pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
30 /// Returns the number of channels.
31 pub fn get_channels(&self) -> u8 { self.channels }
32 /// Returns sample format.
33 pub fn get_format(&self) -> NASoniton { self.format }
34 /// Returns one audio block duration in samples.
35 pub fn get_block_len(&self) -> usize { self.block_len }
36 }
37
38 impl fmt::Display for NAAudioInfo {
39 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
40 write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
41 }
42 }
43
/// Video stream information.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    /// Picture width in pixels.
    pub width: usize,
    /// Picture height in pixels.
    pub height: usize,
    /// Picture is stored downside up.
    pub flipped: bool,
    /// Picture pixel format.
    pub format: NAPixelFormaton,
}
57
58 impl NAVideoInfo {
59 /// Constructs a new `NAVideoInfo` instance.
60 pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
61 NAVideoInfo { width: w, height: h, flipped: flip, format: fmt }
62 }
63 /// Returns picture width.
64 pub fn get_width(&self) -> usize { self.width as usize }
65 /// Returns picture height.
66 pub fn get_height(&self) -> usize { self.height as usize }
67 /// Returns picture orientation.
68 pub fn is_flipped(&self) -> bool { self.flipped }
69 /// Returns picture pixel format.
70 pub fn get_format(&self) -> NAPixelFormaton { self.format }
71 /// Sets new picture width.
72 pub fn set_width(&mut self, w: usize) { self.width = w; }
73 /// Sets new picture height.
74 pub fn set_height(&mut self, h: usize) { self.height = h; }
75 }
76
77 impl fmt::Display for NAVideoInfo {
78 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
79 write!(f, "{}x{}", self.width, self.height)
80 }
81 }
82
/// A list of possible stream information types.
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    /// No codec present.
    None,
    /// Audio codec information.
    Audio(NAAudioInfo),
    /// Video codec information.
    Video(NAVideoInfo),
}
93
94 impl NACodecTypeInfo {
95 /// Returns video stream information.
96 pub fn get_video_info(&self) -> Option<NAVideoInfo> {
97 match *self {
98 NACodecTypeInfo::Video(vinfo) => Some(vinfo),
99 _ => None,
100 }
101 }
102 /// Returns audio stream information.
103 pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
104 match *self {
105 NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
106 _ => None,
107 }
108 }
109 /// Reports whether the current stream is video stream.
110 pub fn is_video(&self) -> bool {
111 match *self {
112 NACodecTypeInfo::Video(_) => true,
113 _ => false,
114 }
115 }
116 /// Reports whether the current stream is audio stream.
117 pub fn is_audio(&self) -> bool {
118 match *self {
119 NACodecTypeInfo::Audio(_) => true,
120 _ => false,
121 }
122 }
123 }
124
125 impl fmt::Display for NACodecTypeInfo {
126 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
127 let ret = match *self {
128 NACodecTypeInfo::None => "".to_string(),
129 NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
130 NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
131 };
132 write!(f, "{}", ret)
133 }
134 }
135
/// Decoded video frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of image with several components those components are stored sequentially and can be accessed in the buffer starting at corresponding component offset.
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    // Picture parameters (dimensions, orientation, pixel format).
    info: NAVideoInfo,
    // Reference-counted backing storage shared by all components.
    data: NABufferRef<Vec<T>>,
    // Per-component start offsets into `data`.
    offs: Vec<usize>,
    // Per-component line strides (in elements).
    strides: Vec<usize>,
}
147
148 impl<T: Clone> NAVideoBuffer<T> {
149 /// Returns the component offset (0 for all unavailable offsets).
150 pub fn get_offset(&self, idx: usize) -> usize {
151 if idx >= self.offs.len() { 0 }
152 else { self.offs[idx] }
153 }
154 /// Returns picture info.
155 pub fn get_info(&self) -> NAVideoInfo { self.info }
156 /// Returns an immutable reference to the data.
157 pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
158 /// Returns a mutable reference to the data.
159 pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
160 /// Returns the number of components in picture format.
161 pub fn get_num_components(&self) -> usize { self.offs.len() }
162 /// Creates a copy of current `NAVideoBuffer`.
163 pub fn copy_buffer(&mut self) -> Self {
164 let mut data: Vec<T> = Vec::with_capacity(self.data.len());
165 data.clone_from(self.data.as_ref());
166 let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
167 offs.clone_from(&self.offs);
168 let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
169 strides.clone_from(&self.strides);
170 NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
171 }
172 /// Returns stride (distance between subsequent lines) for the requested component.
173 pub fn get_stride(&self, idx: usize) -> usize {
174 if idx >= self.strides.len() { return 0; }
175 self.strides[idx]
176 }
177 /// Returns requested component dimensions.
178 pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
179 get_plane_size(&self.info, idx)
180 }
181 /// Converts current instance into buffer reference.
182 pub fn into_ref(self) -> NABufferRef<Self> {
183 NABufferRef::new(self)
184 }
185 }
186
/// A specialised type for reference-counted `NAVideoBuffer`.
pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;
189
/// Decoded audio frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of planar audio samples for each channel are stored sequentially and can be accessed in the buffer starting at corresponding channel offset.
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    // Audio stream parameters (rate, channels, sample format).
    info: NAAudioInfo,
    // Reference-counted backing storage for all channels.
    data: NABufferRef<Vec<T>>,
    // Per-channel start offsets into `data`.
    offs: Vec<usize>,
    // Distance between the starts of two consecutive channels.
    stride: usize,
    // Distance between two samples of the same channel (for interleaved data).
    step: usize,
    // Channel layout description.
    chmap: NAChannelMap,
    // Frame length in samples.
    len: usize,
}
204
205 impl<T: Clone> NAAudioBuffer<T> {
206 /// Returns the start position of requested channel data.
207 pub fn get_offset(&self, idx: usize) -> usize {
208 if idx >= self.offs.len() { 0 }
209 else { self.offs[idx] }
210 }
211 /// Returns the distance between the start of one channel and the next one.
212 pub fn get_stride(&self) -> usize { self.stride }
213 /// Returns the distance between the samples in one channel.
214 pub fn get_step(&self) -> usize { self.step }
215 /// Returns audio format information.
216 pub fn get_info(&self) -> NAAudioInfo { self.info }
217 /// Returns channel map.
218 pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap }
219 /// Returns an immutable reference to the data.
220 pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
221 /// Returns a mutable reference to the data.
222 pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
223 /// Clones current `NAAudioBuffer` into a new one.
224 pub fn copy_buffer(&mut self) -> Self {
225 let mut data: Vec<T> = Vec::with_capacity(self.data.len());
226 data.clone_from(self.data.as_ref());
227 let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
228 offs.clone_from(&self.offs);
229 NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride, step: self.step }
230 }
231 /// Return the length of frame in samples.
232 pub fn get_length(&self) -> usize { self.len }
233 }
234
impl NAAudioBuffer<u8> {
    /// Constructs a new `NAAudioBuffer` instance from a raw byte buffer.
    ///
    /// The whole buffer length is taken as the frame length; no channel
    /// offsets are set and stride/step are zero (packed-data layout).
    pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
        let len = data.len();
        NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0, step: 0 }
    }
}
242
/// A list of possible decoded frame types.
#[derive(Clone)]
pub enum NABufferType {
    /// 8-bit video buffer.
    Video      (NAVideoBufferRef<u8>),
    /// 16-bit video buffer (i.e. every component or packed pixel fits into 16 bits).
    Video16    (NAVideoBufferRef<u16>),
    /// 32-bit video buffer (i.e. every component or packed pixel fits into 32 bits).
    Video32    (NAVideoBufferRef<u32>),
    /// Packed video buffer.
    VideoPacked(NAVideoBufferRef<u8>),
    /// Audio buffer with 8-bit unsigned integer audio.
    AudioU8    (NAAudioBuffer<u8>),
    /// Audio buffer with 16-bit signed integer audio.
    AudioI16   (NAAudioBuffer<i16>),
    /// Audio buffer with 32-bit signed integer audio.
    AudioI32   (NAAudioBuffer<i32>),
    /// Audio buffer with 32-bit floating point audio.
    AudioF32   (NAAudioBuffer<f32>),
    /// Packed audio buffer.
    AudioPacked(NAAudioBuffer<u8>),
    /// Buffer with generic data (e.g. subtitles).
    Data       (NABufferRef<Vec<u8>>),
    /// No data present.
    None,
}
269
270 impl NABufferType {
271 /// Returns the offset to the requested component or channel.
272 pub fn get_offset(&self, idx: usize) -> usize {
273 match *self {
274 NABufferType::Video(ref vb) => vb.get_offset(idx),
275 NABufferType::Video16(ref vb) => vb.get_offset(idx),
276 NABufferType::Video32(ref vb) => vb.get_offset(idx),
277 NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
278 NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
279 NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
280 NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
281 NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
282 _ => 0,
283 }
284 }
285 /// Returns information for video frames.
286 pub fn get_video_info(&self) -> Option<NAVideoInfo> {
287 match *self {
288 NABufferType::Video(ref vb) => Some(vb.get_info()),
289 NABufferType::Video16(ref vb) => Some(vb.get_info()),
290 NABufferType::Video32(ref vb) => Some(vb.get_info()),
291 NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
292 _ => None,
293 }
294 }
295 /// Returns reference to 8-bit (or packed) video buffer.
296 pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
297 match *self {
298 NABufferType::Video(ref vb) => Some(vb.clone()),
299 NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
300 _ => None,
301 }
302 }
303 /// Returns reference to 16-bit video buffer.
304 pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
305 match *self {
306 NABufferType::Video16(ref vb) => Some(vb.clone()),
307 _ => None,
308 }
309 }
310 /// Returns reference to 32-bit video buffer.
311 pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
312 match *self {
313 NABufferType::Video32(ref vb) => Some(vb.clone()),
314 _ => None,
315 }
316 }
317 /// Returns information for audio frames.
318 pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
319 match *self {
320 NABufferType::AudioU8(ref ab) => Some(ab.get_info()),
321 NABufferType::AudioI16(ref ab) => Some(ab.get_info()),
322 NABufferType::AudioI32(ref ab) => Some(ab.get_info()),
323 NABufferType::AudioF32(ref ab) => Some(ab.get_info()),
324 NABufferType::AudioPacked(ref ab) => Some(ab.get_info()),
325 _ => None,
326 }
327 }
328 /// Returns audio channel map.
329 pub fn get_chmap(&self) -> Option<&NAChannelMap> {
330 match *self {
331 NABufferType::AudioU8(ref ab) => Some(ab.get_chmap()),
332 NABufferType::AudioI16(ref ab) => Some(ab.get_chmap()),
333 NABufferType::AudioI32(ref ab) => Some(ab.get_chmap()),
334 NABufferType::AudioF32(ref ab) => Some(ab.get_chmap()),
335 NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()),
336 _ => None,
337 }
338 }
339 /// Returns audio frame duration in samples.
340 pub fn get_audio_length(&self) -> usize {
341 match *self {
342 NABufferType::AudioU8(ref ab) => ab.get_length(),
343 NABufferType::AudioI16(ref ab) => ab.get_length(),
344 NABufferType::AudioI32(ref ab) => ab.get_length(),
345 NABufferType::AudioF32(ref ab) => ab.get_length(),
346 NABufferType::AudioPacked(ref ab) => ab.get_length(),
347 _ => 0,
348 }
349 }
350 /// Returns the distance between starts of two channels.
351 pub fn get_audio_stride(&self) -> usize {
352 match *self {
353 NABufferType::AudioU8(ref ab) => ab.get_stride(),
354 NABufferType::AudioI16(ref ab) => ab.get_stride(),
355 NABufferType::AudioI32(ref ab) => ab.get_stride(),
356 NABufferType::AudioF32(ref ab) => ab.get_stride(),
357 NABufferType::AudioPacked(ref ab) => ab.get_stride(),
358 _ => 0,
359 }
360 }
361 /// Returns the distance between two samples in one channel.
362 pub fn get_audio_step(&self) -> usize {
363 match *self {
364 NABufferType::AudioU8(ref ab) => ab.get_step(),
365 NABufferType::AudioI16(ref ab) => ab.get_step(),
366 NABufferType::AudioI32(ref ab) => ab.get_step(),
367 NABufferType::AudioF32(ref ab) => ab.get_step(),
368 NABufferType::AudioPacked(ref ab) => ab.get_step(),
369 _ => 0,
370 }
371 }
372 /// Returns reference to 8-bit (or packed) audio buffer.
373 pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
374 match *self {
375 NABufferType::AudioU8(ref ab) => Some(ab.clone()),
376 NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
377 _ => None,
378 }
379 }
380 /// Returns reference to 16-bit audio buffer.
381 pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
382 match *self {
383 NABufferType::AudioI16(ref ab) => Some(ab.clone()),
384 _ => None,
385 }
386 }
387 /// Returns reference to 32-bit integer audio buffer.
388 pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
389 match *self {
390 NABufferType::AudioI32(ref ab) => Some(ab.clone()),
391 _ => None,
392 }
393 }
394 /// Returns reference to 32-bit floating point audio buffer.
395 pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
396 match *self {
397 NABufferType::AudioF32(ref ab) => Some(ab.clone()),
398 _ => None,
399 }
400 }
401 }
402
// Maximum number of picture components a simple frame view can describe.
const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
/// Simplified decoded frame data.
pub struct NASimpleVideoFrame<'a, T: Copy> {
    /// Widths of each picture component.
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Heights of each picture component.
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Orientation (upside-down or downside-up) flag.
    pub flip: bool,
    /// Strides for each component.
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Start of each component.
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Number of components.
    pub components: usize,
    /// Pointer to the picture pixel data.
    pub data: &'a mut [T],
}
421
422 impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
423 /// Constructs a new instance of `NASimpleVideoFrame` from `NAVideoBuffer`.
424 pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
425 let vinfo = vbuf.get_info();
426 let components = vinfo.format.components as usize;
427 if components > NA_SIMPLE_VFRAME_COMPONENTS {
428 return None;
429 }
430 let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
431 let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
432 let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
433 let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
434 for comp in 0..components {
435 let (width, height) = vbuf.get_dimensions(comp);
436 w[comp] = width;
437 h[comp] = height;
438 s[comp] = vbuf.get_stride(comp);
439 o[comp] = vbuf.get_offset(comp);
440 }
441 let flip = vinfo.flipped;
442 Some(NASimpleVideoFrame {
443 width: w,
444 height: h,
445 flip,
446 stride: s,
447 offset: o,
448 components,
449 data: vbuf.data.as_mut_slice(),
450 })
451 }
452 }
453
/// A list of possible frame allocator errors.
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    /// Requested picture dimensions are too large.
    TooLargeDimensions,
    /// Invalid input format.
    FormatError,
}
462
463 /// Constructs a new video buffer with requested format.
464 ///
465 /// `align` is power of two alignment for image. E.g. the value of 5 means that frame dimensions will be padded to be multiple of 32.
466 pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
467 let fmt = &vinfo.format;
468 let mut new_size: usize = 0;
469 let mut offs: Vec<usize> = Vec::new();
470 let mut strides: Vec<usize> = Vec::new();
471
472 for i in 0..fmt.get_num_comp() {
473 if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
474 }
475
476 let align_mod = ((1 << align) as usize) - 1;
477 let width = ((vinfo.width as usize) + align_mod) & !align_mod;
478 let height = ((vinfo.height as usize) + align_mod) & !align_mod;
479 let mut max_depth = 0;
480 let mut all_packed = true;
481 let mut all_bytealigned = true;
482 for i in 0..fmt.get_num_comp() {
483 let ochr = fmt.get_chromaton(i);
484 if ochr.is_none() { continue; }
485 let chr = ochr.unwrap();
486 if !chr.is_packed() {
487 all_packed = false;
488 } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
489 all_bytealigned = false;
490 }
491 max_depth = max(max_depth, chr.get_depth());
492 }
493 let unfit_elem_size = match fmt.get_elem_size() {
494 2 | 4 => false,
495 _ => true,
496 };
497
498 //todo semi-packed like NV12
499 if fmt.is_paletted() {
500 //todo various-sized palettes?
501 let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
502 let pic_sz = stride.checked_mul(height);
503 if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
504 let pal_size = 256 * (fmt.get_elem_size() as usize);
505 let new_size = pic_sz.unwrap().checked_add(pal_size);
506 if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
507 offs.push(0);
508 offs.push(stride * height);
509 strides.push(stride);
510 let data: Vec<u8> = vec![0; new_size.unwrap()];
511 let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
512 Ok(NABufferType::Video(buf.into_ref()))
513 } else if !all_packed {
514 for i in 0..fmt.get_num_comp() {
515 let ochr = fmt.get_chromaton(i);
516 if ochr.is_none() { continue; }
517 let chr = ochr.unwrap();
518 offs.push(new_size as usize);
519 let stride = chr.get_linesize(width);
520 let cur_h = chr.get_height(height);
521 let cur_sz = stride.checked_mul(cur_h);
522 if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
523 let new_sz = new_size.checked_add(cur_sz.unwrap());
524 if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
525 new_size = new_sz.unwrap();
526 strides.push(stride);
527 }
528 if max_depth <= 8 {
529 let data: Vec<u8> = vec![0; new_size];
530 let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
531 Ok(NABufferType::Video(buf.into_ref()))
532 } else if max_depth <= 16 {
533 let data: Vec<u16> = vec![0; new_size];
534 let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
535 Ok(NABufferType::Video16(buf.into_ref()))
536 } else {
537 let data: Vec<u32> = vec![0; new_size];
538 let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
539 Ok(NABufferType::Video32(buf.into_ref()))
540 }
541 } else if all_bytealigned || unfit_elem_size {
542 let elem_sz = fmt.get_elem_size();
543 let line_sz = width.checked_mul(elem_sz as usize);
544 if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
545 let new_sz = line_sz.unwrap().checked_mul(height);
546 if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
547 new_size = new_sz.unwrap();
548 let data: Vec<u8> = vec![0; new_size];
549 strides.push(line_sz.unwrap());
550 let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
551 Ok(NABufferType::VideoPacked(buf.into_ref()))
552 } else {
553 let elem_sz = fmt.get_elem_size();
554 let new_sz = width.checked_mul(height);
555 if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
556 new_size = new_sz.unwrap();
557 match elem_sz {
558 2 => {
559 let data: Vec<u16> = vec![0; new_size];
560 strides.push(width);
561 let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
562 Ok(NABufferType::Video16(buf.into_ref()))
563 },
564 4 => {
565 let data: Vec<u32> = vec![0; new_size];
566 strides.push(width);
567 let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
568 Ok(NABufferType::Video32(buf.into_ref()))
569 },
570 _ => unreachable!(),
571 }
572 }
573 }
574
575 /// Constructs a new audio buffer for the requested format and length.
576 #[allow(clippy::collapsible_if)]
577 pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
578 let mut offs: Vec<usize> = Vec::new();
579 if ainfo.format.is_planar() || ((ainfo.format.get_bits() % 8) == 0) {
580 let len = nsamples.checked_mul(ainfo.channels as usize);
581 if len == None { return Err(AllocatorError::TooLargeDimensions); }
582 let length = len.unwrap();
583 let stride;
584 let step;
585 if ainfo.format.is_planar() {
586 stride = nsamples;
587 step = 1;
588 for i in 0..ainfo.channels {
589 offs.push((i as usize) * stride);
590 }
591 } else {
592 stride = 1;
593 step = ainfo.channels as usize;
594 for i in 0..ainfo.channels {
595 offs.push(i as usize);
596 }
597 }
598 if ainfo.format.is_float() {
599 if ainfo.format.get_bits() == 32 {
600 let data: Vec<f32> = vec![0.0; length];
601 let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
602 Ok(NABufferType::AudioF32(buf))
603 } else {
604 Err(AllocatorError::TooLargeDimensions)
605 }
606 } else {
607 if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
608 let data: Vec<u8> = vec![0; length];
609 let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
610 Ok(NABufferType::AudioU8(buf))
611 } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
612 let data: Vec<i16> = vec![0; length];
613 let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
614 Ok(NABufferType::AudioI16(buf))
615 } else {
616 Err(AllocatorError::TooLargeDimensions)
617 }
618 }
619 } else {
620 let len = nsamples.checked_mul(ainfo.channels as usize);
621 if len == None { return Err(AllocatorError::TooLargeDimensions); }
622 let length = ainfo.format.get_audio_size(len.unwrap() as u64);
623 let data: Vec<u8> = vec![0; length];
624 let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0, step: 0 };
625 Ok(NABufferType::AudioPacked(buf))
626 }
627 }
628
629 /// Constructs a new buffer for generic data.
630 pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
631 let data: Vec<u8> = vec![0; size];
632 let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
633 Ok(NABufferType::Data(buf))
634 }
635
/// Creates a clone of current buffer.
///
/// NOTE(review): the payload lives behind `NABufferRef`, so this looks like a
/// reference-counted (shallow) clone rather than a deep copy — confirm
/// against `refs.rs` before relying on independence of the copies.
pub fn copy_buffer(buf: NABufferType) -> NABufferType {
    buf.clone()
}
640
/// Video frame pool.
///
/// This structure allows codec to effectively reuse old frames instead of allocating and de-allocating frames every time.
/// Caller can also reserve some frames for its own purposes e.g. display queue.
pub struct NAVideoBufferPool<T:Copy> {
    // Allocated frames; a frame with refcount 1 is free for reuse.
    pool: Vec<NAVideoBufferRef<T>>,
    // Number of frames the decoder itself needs.
    max_len: usize,
    // Extra frames reserved for the caller (see `set_dec_bufs`).
    add_len: usize,
}
650
651 impl<T:Copy> NAVideoBufferPool<T> {
652 /// Constructs a new `NAVideoBufferPool` instance.
653 pub fn new(max_len: usize) -> Self {
654 Self {
655 pool: Vec::with_capacity(max_len),
656 max_len,
657 add_len: 0,
658 }
659 }
660 /// Sets the number of buffers reserved for the user.
661 pub fn set_dec_bufs(&mut self, add_len: usize) {
662 self.add_len = add_len;
663 }
664 /// Returns an unused buffer from the pool.
665 pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
666 for e in self.pool.iter() {
667 if e.get_num_refs() == 1 {
668 return Some(e.clone());
669 }
670 }
671 None
672 }
673 /// Clones provided frame data into a free pool frame.
674 pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
675 let mut dbuf = self.get_free()?;
676 dbuf.data.copy_from_slice(&rbuf.data);
677 Some(dbuf)
678 }
679 /// Clears the pool from all frames.
680 pub fn reset(&mut self) {
681 self.pool.truncate(0);
682 }
683 }
684
685 impl NAVideoBufferPool<u8> {
686 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
687 ///
688 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
689 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
690 let nbufs = self.max_len + self.add_len - self.pool.len();
691 for _ in 0..nbufs {
692 let vbuf = alloc_video_buffer(vinfo, align)?;
693 if let NABufferType::Video(buf) = vbuf {
694 self.pool.push(buf);
695 } else if let NABufferType::VideoPacked(buf) = vbuf {
696 self.pool.push(buf);
697 } else {
698 return Err(AllocatorError::FormatError);
699 }
700 }
701 Ok(())
702 }
703 }
704
705 impl NAVideoBufferPool<u16> {
706 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
707 ///
708 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
709 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
710 let nbufs = self.max_len + self.add_len - self.pool.len();
711 for _ in 0..nbufs {
712 let vbuf = alloc_video_buffer(vinfo, align)?;
713 if let NABufferType::Video16(buf) = vbuf {
714 self.pool.push(buf);
715 } else {
716 return Err(AllocatorError::FormatError);
717 }
718 }
719 Ok(())
720 }
721 }
722
723 impl NAVideoBufferPool<u32> {
724 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
725 ///
726 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
727 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
728 let nbufs = self.max_len + self.add_len - self.pool.len();
729 for _ in 0..nbufs {
730 let vbuf = alloc_video_buffer(vinfo, align)?;
731 if let NABufferType::Video32(buf) = vbuf {
732 self.pool.push(buf);
733 } else {
734 return Err(AllocatorError::FormatError);
735 }
736 }
737 Ok(())
738 }
739 }
740
/// Information about codec contained in a stream.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    // Static codec name (e.g. used for decoder lookup).
    name: &'static str,
    // Stream type plus audio/video parameters.
    properties: NACodecTypeInfo,
    // Optional codec-specific initialisation data, shared via `Arc`.
    extradata: Option<Arc<Vec<u8>>>,
}

/// A specialised type for reference-counted `NACodecInfo`.
pub type NACodecInfoRef = Arc<NACodecInfo>;
752
753 impl NACodecInfo {
754 /// Constructs a new instance of `NACodecInfo`.
755 pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
756 let extradata = match edata {
757 None => None,
758 Some(vec) => Some(Arc::new(vec)),
759 };
760 NACodecInfo { name, properties: p, extradata }
761 }
762 /// Constructs a new reference-counted instance of `NACodecInfo`.
763 pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
764 NACodecInfo { name, properties: p, extradata: edata }
765 }
766 /// Converts current instance into a reference-counted one.
767 pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
768 /// Returns codec information.
769 pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
770 /// Returns additional initialisation data required by the codec.
771 pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
772 if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
773 None
774 }
775 /// Returns codec name.
776 pub fn get_name(&self) -> &'static str { self.name }
777 /// Reports whether it is a video codec.
778 pub fn is_video(&self) -> bool {
779 if let NACodecTypeInfo::Video(_) = self.properties { return true; }
780 false
781 }
782 /// Reports whether it is an audio codec.
783 pub fn is_audio(&self) -> bool {
784 if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
785 false
786 }
787 /// Constructs a new empty reference-counted instance of `NACodecInfo`.
788 pub fn new_dummy() -> Arc<Self> {
789 Arc::new(DUMMY_CODEC_INFO)
790 }
791 /// Updates codec infomation.
792 pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
793 Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
794 }
795 }
796
impl Default for NACodecInfo {
    // The default codec info is the shared dummy ("none") descriptor.
    fn default() -> Self { DUMMY_CODEC_INFO }
}
800
801 impl fmt::Display for NACodecInfo {
802 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
803 let edata = match self.extradata.clone() {
804 None => "no extradata".to_string(),
805 Some(v) => format!("{} byte(s) of extradata", v.len()),
806 };
807 write!(f, "{}: {} {}", self.name, self.properties, edata)
808 }
809 }
810
/// Default empty codec information.
pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };
816
/// A list of accepted option values.
#[derive(Debug,Clone)]
pub enum NAValue {
    /// Empty value.
    None,
    /// Integer value.
    Int(i32),
    /// Long integer value.
    Long(i64),
    /// String value.
    String(String),
    /// Binary data value.
    Data(Arc<Vec<u8>>),
}
831
/// A list of recognized frame types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    /// Intra frame type.
    I,
    /// Inter frame type.
    P,
    /// Bidirectionally predicted frame.
    B,
    /// Skip frame.
    ///
    /// When such frame is encountered then last frame should be used again if it is needed.
    Skip,
    /// Some other frame type.
    Other,
}

impl fmt::Display for FrameType {
    /// Prints a short one-character (or `skip`) frame type label.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match *self {
            FrameType::I     => "I",
            FrameType::P     => "P",
            FrameType::B     => "B",
            FrameType::Skip  => "skip",
            FrameType::Other => "x",
        };
        f.write_str(label)
    }
}
861
/// Timestamp information.
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    /// Presentation timestamp.
    pub pts: Option<u64>,
    /// Decode timestamp.
    pub dts: Option<u64>,
    /// Duration (in timebase units).
    pub duration: Option<u64>,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}

impl NATimeInfo {
    /// Constructs a new `NATimeInfo` instance.
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    /// Returns presentation timestamp.
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    /// Returns decoding timestamp.
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    /// Returns duration.
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    /// Sets new presentation timestamp.
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    /// Sets new decoding timestamp.
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    /// Sets new duration.
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }

    /// Converts time in given scale into timestamp in given base.
    ///
    /// Computes `time * tb_num / base / tb_den`, falling back to dividing
    /// by `base` first when the multiplication would overflow (at the cost
    /// of some precision).
    ///
    /// Fix: the original contained a second, identical
    /// `time.checked_mul(tb_num)` fallback that could never succeed after
    /// the first one failed — dead code removed, behaviour unchanged.
    ///
    /// NOTE(review): this multiplies by `tb_num` while `ts_to_time` also
    /// multiplies by `tb_num`, so the two are not mutual inverses —
    /// verify the intended semantics of `base` before "fixing" either.
    pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        if let Some(tmp) = time.checked_mul(tb_num) {
            tmp / base / tb_den
        } else {
            let coarse = time / base;
            if let Some(tmp) = coarse.checked_mul(tb_num) {
                tmp / tb_den
            } else {
                (coarse / tb_den) * tb_num
            }
        }
    }
    /// Converts timestamp in given base into time in given scale.
    ///
    /// Computes `ts * base * tb_num / tb_den` with progressively coarser
    /// fallbacks when intermediate products overflow.
    pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        if let Some(tmp) = ts.checked_mul(base) {
            if let Some(tmp2) = tmp.checked_mul(tb_num) {
                tmp2 / tb_den
            } else {
                (tmp / tb_den) * tb_num
            }
        } else if let Some(tmp) = ts.checked_mul(tb_num) {
            (tmp / tb_den) * base
        } else {
            (ts / tb_den) * base * tb_num
        }
    }
}
939
/// Decoded frame information.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    /// Frame timestamp.
    pub ts: NATimeInfo,
    /// Frame ID.
    pub id: i64,
    /// Decoded frame data (accessible via `get_buffer()`).
    buffer: NABufferType,
    /// Codec information describing the data in `buffer`.
    info: NACodecInfoRef,
    /// Frame type.
    pub frame_type: FrameType,
    /// Keyframe flag.
    pub key: bool,
//    options: HashMap<String, NAValue>,
}

/// A specialised type for reference-counted `NAFrame`.
pub type NAFrameRef = Arc<NAFrame>;
959
960 fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
961 let chromaton = info.get_format().get_chromaton(idx);
962 if chromaton.is_none() { return (0, 0); }
963 let (hs, vs) = chromaton.unwrap().get_subsampling();
964 let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
965 let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
966 (w, h)
967 }
968
impl NAFrame {
    /// Constructs a new `NAFrame` instance.
    pub fn new(ts: NATimeInfo,
               ftype: FrameType,
               keyframe: bool,
               info: NACodecInfoRef,
               /*options: HashMap<String, NAValue>,*/
               buffer: NABufferType) -> Self {
        // ID starts at zero; a real one can be assigned later via `set_id()`.
        NAFrame { ts, id: 0, buffer, info, frame_type: ftype, key: keyframe/*, options*/ }
    }
    /// Returns frame format information.
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    /// Returns frame type.
    pub fn get_frame_type(&self) -> FrameType { self.frame_type }
    /// Reports whether the frame is a keyframe.
    pub fn is_keyframe(&self) -> bool { self.key }
    /// Sets new frame type.
    pub fn set_frame_type(&mut self, ftype: FrameType) { self.frame_type = ftype; }
    /// Sets keyframe flag.
    pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
    /// Returns frame timestamp.
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    /// Returns frame presentation time.
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    /// Returns frame decoding time.
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    /// Returns picture ID.
    pub fn get_id(&self) -> i64 { self.id }
    /// Returns frame display duration.
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    /// Sets new presentation timestamp.
    pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
    /// Sets new decoding timestamp.
    pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
    /// Sets new picture ID.
    pub fn set_id(&mut self, id: i64) { self.id = id; }
    /// Sets new duration.
    pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }

    /// Returns a reference to the frame data.
    pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }

    /// Converts current instance into a reference-counted one.
    pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }

    /// Creates new frame with metadata from `NAPacket`.
    ///
    /// Timestamp and keyframe flag are copied from the packet; the frame
    /// type starts out as `FrameType::Other` and can be corrected later via
    /// `set_frame_type()`.
    pub fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
        NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, /*HashMap::new(),*/ buf)
    }
}
1019
1020 impl fmt::Display for NAFrame {
1021 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1022 let mut ostr = format!("frame type {}", self.frame_type);
1023 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1024 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1025 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1026 if self.key { ostr = format!("{} kf", ostr); }
1027 write!(f, "[{}]", ostr)
1028 }
1029 }
1030
/// A list of possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// Video stream.
    Video,
    /// Audio stream.
    Audio,
    /// Subtitles.
    Subtitles,
    /// Any data stream (or might be an unrecognized audio/video stream).
    Data,
    /// Nonexistent stream.
    None,
}

impl fmt::Display for StreamType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Resolve the variant name first, then emit it with a single call.
        let name = match *self {
            StreamType::Video     => "Video",
            StreamType::Audio     => "Audio",
            StreamType::Subtitles => "Subtitles",
            StreamType::Data      => "Data",
            StreamType::None      => "-",
        };
        f.write_str(name)
    }
}
1058
/// Stream data.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    /// Stream media type (video, audio, ...).
    media_type: StreamType,
    /// Stream ID.
    pub id: u32,
    /// Stream number as assigned by the demuxer (see `set_num()`).
    num: usize,
    /// Codec information for this stream.
    info: NACodecInfoRef,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}

/// A specialised reference-counted `NAStream` type.
pub type NAStreamRef = Arc<NAStream>;
1076
/// Downscales the timebase by its greatest common denominator.
///
/// Returns the reduced `(numerator, denominator)` pair. A zero numerator is
/// returned unchanged along with the original denominator.
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    // Euclidean GCD via modulo: O(log min(a, b)) iterations. The previous
    // subtraction loop needed O(max/min) iterations, which was pathologically
    // slow for coprime pairs like (3, 2147483647).
    let mut a = tb_num;
    let mut b = tb_den;
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }

    (tb_num / a, tb_den / a)
}
1092
impl NAStream {
    /// Constructs a new `NAStream` instance.
    ///
    /// The supplied timebase is reduced by its greatest common divisor
    /// before being stored; the stream number starts at zero until the
    /// demuxer assigns one via `set_num()`.
    pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
    }
    /// Returns stream id.
    pub fn get_id(&self) -> u32 { self.id }
    /// Returns stream type.
    pub fn get_media_type(&self) -> StreamType { self.media_type }
    /// Returns stream number assigned by demuxer.
    pub fn get_num(&self) -> usize { self.num }
    /// Sets stream number.
    pub fn set_num(&mut self, num: usize) { self.num = num; }
    /// Returns codec information.
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    /// Returns stream timebase.
    pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
    /// Sets new stream timebase.
    ///
    /// Like the constructor, this stores the timebase in reduced form.
    pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
        let (n, d) = reduce_timebase(tb_num, tb_den);
        self.tb_num = n;
        self.tb_den = d;
    }
    /// Converts current instance into a reference-counted one.
    pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
}
1120
1121 impl fmt::Display for NAStream {
1122 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1123 write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
1124 }
1125 }
1126
/// Packet with compressed data.
#[allow(dead_code)]
pub struct NAPacket {
    /// Stream this packet belongs to.
    stream: NAStreamRef,
    /// Packet timestamp.
    pub ts: NATimeInfo,
    /// Reference-counted compressed payload (accessible via `get_buffer()`).
    buffer: NABufferRef<Vec<u8>>,
    /// Keyframe flag.
    pub keyframe: bool,
//    options: HashMap<String, NAValue<'a>>,
}
1138
1139 impl NAPacket {
1140 /// Constructs a new `NAPacket` instance.
1141 pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
1142 // let mut vec: Vec<u8> = Vec::new();
1143 // vec.resize(size, 0);
1144 NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) }
1145 }
1146 /// Returns information about the stream packet belongs to.
1147 pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
1148 /// Returns packet timestamp.
1149 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
1150 /// Returns packet presentation timestamp.
1151 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
1152 /// Returns packet decoding timestamp.
1153 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
1154 /// Returns packet duration.
1155 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1156 /// Reports whether this is a keyframe packet.
1157 pub fn is_keyframe(&self) -> bool { self.keyframe }
1158 /// Returns a reference to packet data.
1159 pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
1160 }
1161
impl Drop for NAPacket {
    // NOTE(review): deliberately empty — presumably a placeholder for future
    // cleanup; the buffer reference is released by NABufferRef's own drop.
    // Confirm the intent before removing this impl.
    fn drop(&mut self) {}
}
1165
1166 impl fmt::Display for NAPacket {
1167 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1168 let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len());
1169 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1170 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1171 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1172 if self.keyframe { ostr = format!("{} kf", ostr); }
1173 ostr += "]";
1174 write!(f, "{}", ostr)
1175 }
1176 }