core/frame: add missing case for NABufferType::get_offset()
[nihav.git] / nihav-core / src / frame.rs
1 //! Packets and decoded frames functionality.
2 use std::cmp::max;
3 //use std::collections::HashMap;
4 use std::fmt;
5 use std::sync::Arc;
6 pub use crate::formats::*;
7 pub use crate::refs::*;
8
/// Audio stream information.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    /// Sample rate in Hertz.
    pub sample_rate: u32,
    /// Number of channels.
    pub channels: u8,
    /// Audio sample format.
    pub format: NASoniton,
    /// Length of one audio block in samples.
    pub block_len: usize,
}
22
23 impl NAAudioInfo {
24 /// Constructs a new `NAAudioInfo` instance.
25 pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
26 NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
27 }
28 /// Returns audio sample rate.
29 pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
30 /// Returns the number of channels.
31 pub fn get_channels(&self) -> u8 { self.channels }
32 /// Returns sample format.
33 pub fn get_format(&self) -> NASoniton { self.format }
34 /// Returns one audio block duration in samples.
35 pub fn get_block_len(&self) -> usize { self.block_len }
36 }
37
38 impl fmt::Display for NAAudioInfo {
39 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
40 write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
41 }
42 }
43
/// Video stream information.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    /// Picture width in pixels.
    pub width: usize,
    /// Picture height in pixels.
    pub height: usize,
    /// Picture is stored downside up.
    pub flipped: bool,
    /// Picture pixel format.
    pub format: NAPixelFormaton,
}
57
58 impl NAVideoInfo {
59 /// Constructs a new `NAVideoInfo` instance.
60 pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
61 NAVideoInfo { width: w, height: h, flipped: flip, format: fmt }
62 }
63 /// Returns picture width.
64 pub fn get_width(&self) -> usize { self.width as usize }
65 /// Returns picture height.
66 pub fn get_height(&self) -> usize { self.height as usize }
67 /// Returns picture orientation.
68 pub fn is_flipped(&self) -> bool { self.flipped }
69 /// Returns picture pixel format.
70 pub fn get_format(&self) -> NAPixelFormaton { self.format }
71 /// Sets new picture width.
72 pub fn set_width(&mut self, w: usize) { self.width = w; }
73 /// Sets new picture height.
74 pub fn set_height(&mut self, h: usize) { self.height = h; }
75 }
76
77 impl fmt::Display for NAVideoInfo {
78 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
79 write!(f, "{}x{}", self.width, self.height)
80 }
81 }
82
/// A list of possible stream information types.
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    /// No codec present.
    None,
    /// Audio codec information.
    Audio(NAAudioInfo),
    /// Video codec information.
    Video(NAVideoInfo),
}
93
94 impl NACodecTypeInfo {
95 /// Returns video stream information.
96 pub fn get_video_info(&self) -> Option<NAVideoInfo> {
97 match *self {
98 NACodecTypeInfo::Video(vinfo) => Some(vinfo),
99 _ => None,
100 }
101 }
102 /// Returns audio stream information.
103 pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
104 match *self {
105 NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
106 _ => None,
107 }
108 }
109 /// Reports whether the current stream is video stream.
110 pub fn is_video(&self) -> bool {
111 match *self {
112 NACodecTypeInfo::Video(_) => true,
113 _ => false,
114 }
115 }
116 /// Reports whether the current stream is audio stream.
117 pub fn is_audio(&self) -> bool {
118 match *self {
119 NACodecTypeInfo::Audio(_) => true,
120 _ => false,
121 }
122 }
123 }
124
125 impl fmt::Display for NACodecTypeInfo {
126 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
127 let ret = match *self {
128 NACodecTypeInfo::None => "".to_string(),
129 NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
130 NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
131 };
132 write!(f, "{}", ret)
133 }
134 }
135
/// Decoded video frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of image with several components those components are stored sequentially and can be accessed in the buffer starting at corresponding component offset.
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    info: NAVideoInfo,          // picture parameters this buffer was allocated for
    data: NABufferRef<Vec<T>>,  // all components stored in one shared vector
    offs: Vec<usize>,           // start offset of each component inside `data`
    strides: Vec<usize>,        // line stride (in elements) for each component
}
147
148 impl<T: Clone> NAVideoBuffer<T> {
149 /// Returns the component offset (0 for all unavailable offsets).
150 pub fn get_offset(&self, idx: usize) -> usize {
151 if idx >= self.offs.len() { 0 }
152 else { self.offs[idx] }
153 }
154 /// Returns picture info.
155 pub fn get_info(&self) -> NAVideoInfo { self.info }
156 /// Returns an immutable reference to the data.
157 pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
158 /// Returns a mutable reference to the data.
159 pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
160 /// Returns the number of components in picture format.
161 pub fn get_num_components(&self) -> usize { self.offs.len() }
162 /// Creates a copy of current `NAVideoBuffer`.
163 pub fn copy_buffer(&mut self) -> Self {
164 let mut data: Vec<T> = Vec::with_capacity(self.data.len());
165 data.clone_from(self.data.as_ref());
166 let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
167 offs.clone_from(&self.offs);
168 let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
169 strides.clone_from(&self.strides);
170 NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
171 }
172 /// Returns stride (distance between subsequent lines) for the requested component.
173 pub fn get_stride(&self, idx: usize) -> usize {
174 if idx >= self.strides.len() { return 0; }
175 self.strides[idx]
176 }
177 /// Returns requested component dimensions.
178 pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
179 get_plane_size(&self.info, idx)
180 }
181 /// Converts current instance into buffer reference.
182 pub fn into_ref(self) -> NABufferRef<Self> {
183 NABufferRef::new(self)
184 }
185 }
186
/// A specialised type for reference-counted `NAVideoBuffer`.
///
/// Used e.g. by `NABufferType` and `NAVideoBufferPool` to share decoded frames.
pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;
189
/// Decoded audio frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of planar audio samples for each channel are stored sequentially and can be accessed in the buffer starting at corresponding channel offset.
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    info: NAAudioInfo,          // audio parameters this buffer was allocated for
    data: NABufferRef<Vec<T>>,  // all channels stored in one shared vector
    offs: Vec<usize>,           // start offset of each channel inside `data`
    stride: usize,              // distance between the starts of two channels
    step: usize,                // distance between samples of one channel
    chmap: NAChannelMap,        // channel layout description
    len: usize,                 // frame length in samples
}
204
205 impl<T: Clone> NAAudioBuffer<T> {
206 /// Returns the start position of requested channel data.
207 pub fn get_offset(&self, idx: usize) -> usize {
208 if idx >= self.offs.len() { 0 }
209 else { self.offs[idx] }
210 }
211 /// Returns the distance between the start of one channel and the next one.
212 pub fn get_stride(&self) -> usize { self.stride }
213 /// Returns the distance between the samples in one channel.
214 pub fn get_step(&self) -> usize { self.step }
215 /// Returns audio format information.
216 pub fn get_info(&self) -> NAAudioInfo { self.info }
217 /// Returns channel map.
218 pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap }
219 /// Returns an immutable reference to the data.
220 pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
221 /// Returns a mutable reference to the data.
222 pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
223 /// Clones current `NAAudioBuffer` into a new one.
224 pub fn copy_buffer(&mut self) -> Self {
225 let mut data: Vec<T> = Vec::with_capacity(self.data.len());
226 data.clone_from(self.data.as_ref());
227 let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
228 offs.clone_from(&self.offs);
229 NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride, step: self.step }
230 }
231 /// Return the length of frame in samples.
232 pub fn get_length(&self) -> usize { self.len }
233 }
234
235 impl NAAudioBuffer<u8> {
236 /// Constructs a new `NAAudioBuffer` instance.
237 pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
238 let len = data.len();
239 NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0, step: 0 }
240 }
241 }
242
/// A list of possible decoded frame types.
#[derive(Clone)]
pub enum NABufferType {
    /// 8-bit video buffer.
    Video      (NAVideoBufferRef<u8>),
    /// 16-bit video buffer (i.e. every component or packed pixel fits into 16 bits).
    Video16    (NAVideoBufferRef<u16>),
    /// 32-bit video buffer (i.e. every component or packed pixel fits into 32 bits).
    Video32    (NAVideoBufferRef<u32>),
    /// Packed video buffer (components interleaved, stored as raw bytes).
    VideoPacked(NAVideoBufferRef<u8>),
    /// Audio buffer with 8-bit unsigned integer audio.
    AudioU8    (NAAudioBuffer<u8>),
    /// Audio buffer with 16-bit signed integer audio.
    AudioI16   (NAAudioBuffer<i16>),
    /// Audio buffer with 32-bit signed integer audio.
    AudioI32   (NAAudioBuffer<i32>),
    /// Audio buffer with 32-bit floating point audio.
    AudioF32   (NAAudioBuffer<f32>),
    /// Packed audio buffer (samples stored as raw bytes).
    AudioPacked(NAAudioBuffer<u8>),
    /// Buffer with generic data (e.g. subtitles).
    Data       (NABufferRef<Vec<u8>>),
    /// No data present.
    None,
}
269
270 impl NABufferType {
271 /// Returns the offset to the requested component or channel.
272 pub fn get_offset(&self, idx: usize) -> usize {
273 match *self {
274 NABufferType::Video(ref vb) => vb.get_offset(idx),
275 NABufferType::Video16(ref vb) => vb.get_offset(idx),
276 NABufferType::Video32(ref vb) => vb.get_offset(idx),
277 NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
278 NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
279 NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
280 NABufferType::AudioI32(ref ab) => ab.get_offset(idx),
281 NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
282 NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
283 _ => 0,
284 }
285 }
286 /// Returns information for video frames.
287 pub fn get_video_info(&self) -> Option<NAVideoInfo> {
288 match *self {
289 NABufferType::Video(ref vb) => Some(vb.get_info()),
290 NABufferType::Video16(ref vb) => Some(vb.get_info()),
291 NABufferType::Video32(ref vb) => Some(vb.get_info()),
292 NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
293 _ => None,
294 }
295 }
296 /// Returns reference to 8-bit (or packed) video buffer.
297 pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
298 match *self {
299 NABufferType::Video(ref vb) => Some(vb.clone()),
300 NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
301 _ => None,
302 }
303 }
304 /// Returns reference to 16-bit video buffer.
305 pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
306 match *self {
307 NABufferType::Video16(ref vb) => Some(vb.clone()),
308 _ => None,
309 }
310 }
311 /// Returns reference to 32-bit video buffer.
312 pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
313 match *self {
314 NABufferType::Video32(ref vb) => Some(vb.clone()),
315 _ => None,
316 }
317 }
318 /// Returns information for audio frames.
319 pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
320 match *self {
321 NABufferType::AudioU8(ref ab) => Some(ab.get_info()),
322 NABufferType::AudioI16(ref ab) => Some(ab.get_info()),
323 NABufferType::AudioI32(ref ab) => Some(ab.get_info()),
324 NABufferType::AudioF32(ref ab) => Some(ab.get_info()),
325 NABufferType::AudioPacked(ref ab) => Some(ab.get_info()),
326 _ => None,
327 }
328 }
329 /// Returns audio channel map.
330 pub fn get_chmap(&self) -> Option<&NAChannelMap> {
331 match *self {
332 NABufferType::AudioU8(ref ab) => Some(ab.get_chmap()),
333 NABufferType::AudioI16(ref ab) => Some(ab.get_chmap()),
334 NABufferType::AudioI32(ref ab) => Some(ab.get_chmap()),
335 NABufferType::AudioF32(ref ab) => Some(ab.get_chmap()),
336 NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()),
337 _ => None,
338 }
339 }
340 /// Returns audio frame duration in samples.
341 pub fn get_audio_length(&self) -> usize {
342 match *self {
343 NABufferType::AudioU8(ref ab) => ab.get_length(),
344 NABufferType::AudioI16(ref ab) => ab.get_length(),
345 NABufferType::AudioI32(ref ab) => ab.get_length(),
346 NABufferType::AudioF32(ref ab) => ab.get_length(),
347 NABufferType::AudioPacked(ref ab) => ab.get_length(),
348 _ => 0,
349 }
350 }
351 /// Returns the distance between starts of two channels.
352 pub fn get_audio_stride(&self) -> usize {
353 match *self {
354 NABufferType::AudioU8(ref ab) => ab.get_stride(),
355 NABufferType::AudioI16(ref ab) => ab.get_stride(),
356 NABufferType::AudioI32(ref ab) => ab.get_stride(),
357 NABufferType::AudioF32(ref ab) => ab.get_stride(),
358 NABufferType::AudioPacked(ref ab) => ab.get_stride(),
359 _ => 0,
360 }
361 }
362 /// Returns the distance between two samples in one channel.
363 pub fn get_audio_step(&self) -> usize {
364 match *self {
365 NABufferType::AudioU8(ref ab) => ab.get_step(),
366 NABufferType::AudioI16(ref ab) => ab.get_step(),
367 NABufferType::AudioI32(ref ab) => ab.get_step(),
368 NABufferType::AudioF32(ref ab) => ab.get_step(),
369 NABufferType::AudioPacked(ref ab) => ab.get_step(),
370 _ => 0,
371 }
372 }
373 /// Returns reference to 8-bit (or packed) audio buffer.
374 pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
375 match *self {
376 NABufferType::AudioU8(ref ab) => Some(ab.clone()),
377 NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
378 _ => None,
379 }
380 }
381 /// Returns reference to 16-bit audio buffer.
382 pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
383 match *self {
384 NABufferType::AudioI16(ref ab) => Some(ab.clone()),
385 _ => None,
386 }
387 }
388 /// Returns reference to 32-bit integer audio buffer.
389 pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
390 match *self {
391 NABufferType::AudioI32(ref ab) => Some(ab.clone()),
392 _ => None,
393 }
394 }
395 /// Returns reference to 32-bit floating point audio buffer.
396 pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
397 match *self {
398 NABufferType::AudioF32(ref ab) => Some(ab.clone()),
399 _ => None,
400 }
401 }
402 }
403
// Maximum number of picture components `NASimpleVideoFrame` can describe.
const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
/// Simplified decoded frame data.
pub struct NASimpleVideoFrame<'a, T: Copy> {
    /// Widths of each picture component.
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Heights of each picture component.
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Orientation (upside-down or downside-up) flag.
    pub flip: bool,
    /// Strides for each component.
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Start of each component.
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Number of components.
    pub components: usize,
    /// Pointer to the picture pixel data.
    pub data: &'a mut [T],
}
422
423 impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
424 /// Constructs a new instance of `NASimpleVideoFrame` from `NAVideoBuffer`.
425 pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
426 let vinfo = vbuf.get_info();
427 let components = vinfo.format.components as usize;
428 if components > NA_SIMPLE_VFRAME_COMPONENTS {
429 return None;
430 }
431 let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
432 let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
433 let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
434 let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
435 for comp in 0..components {
436 let (width, height) = vbuf.get_dimensions(comp);
437 w[comp] = width;
438 h[comp] = height;
439 s[comp] = vbuf.get_stride(comp);
440 o[comp] = vbuf.get_offset(comp);
441 }
442 let flip = vinfo.flipped;
443 Some(NASimpleVideoFrame {
444 width: w,
445 height: h,
446 flip,
447 stride: s,
448 offset: o,
449 components,
450 data: vbuf.data.as_mut_slice(),
451 })
452 }
453 }
454
/// A list of possible frame allocator errors.
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    /// Requested picture dimensions are too large (size computation overflowed).
    TooLargeDimensions,
    /// Invalid input format.
    FormatError,
}
463
/// Constructs a new video buffer with requested format.
///
/// `align` is power of two alignment for image. E.g. the value of 5 means that frame dimensions will be padded to be multiple of 32.
///
/// The allocation strategy depends on the format: paletted formats get a
/// pixel plane plus a 256-entry palette, planar formats get sequential
/// per-component planes, and packed formats get either a raw byte buffer
/// or a single natively-typed plane.
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
    let fmt = &vinfo.format;
    // Running element count and the per-component layout tables.
    let mut new_size: usize = 0;
    let mut offs: Vec<usize> = Vec::new();
    let mut strides: Vec<usize> = Vec::new();

    // Every declared component must have a chromaton description.
    for i in 0..fmt.get_num_comp() {
        if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
    }

    // Round both dimensions up to a multiple of 2^align.
    // NOTE(review): `1 << align` is an i32 shift, so align >= 31 would
    // overflow — presumably callers only pass small alignments; verify.
    let align_mod = ((1 << align) as usize) - 1;
    let width = ((vinfo.width as usize) + align_mod) & !align_mod;
    let height = ((vinfo.height as usize) + align_mod) & !align_mod;
    // Scan the components to pick an allocation strategy below.
    let mut max_depth = 0;
    let mut all_packed = true;
    let mut all_bytealigned = true;
    for i in 0..fmt.get_num_comp() {
        let ochr = fmt.get_chromaton(i);
        if ochr.is_none() { continue; }
        let chr = ochr.unwrap();
        if !chr.is_packed() {
            all_packed = false;
        } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
            // Packed component that does not end on a byte boundary.
            all_bytealigned = false;
        }
        max_depth = max(max_depth, chr.get_depth());
    }
    // Only 2- and 4-byte elements can use the natively-typed packed path
    // at the bottom; anything else must fall back to a byte buffer.
    let unfit_elem_size = match fmt.get_elem_size() {
        2 | 4 => false,
        _ => true,
    };

    //todo semi-packed like NV12
    if fmt.is_paletted() {
        //todo various-sized palettes?
        // Paletted picture: one pixel plane followed by a 256-entry palette.
        let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
        let pic_sz = stride.checked_mul(height);
        if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let pal_size = 256 * (fmt.get_elem_size() as usize);
        let new_size = pic_sz.unwrap().checked_add(pal_size);
        if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
        offs.push(0);                // pixels
        offs.push(stride * height);  // palette starts right after the pixels
        strides.push(stride);
        let data: Vec<u8> = vec![0; new_size.unwrap()];
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::Video(buf.into_ref()))
    } else if !all_packed {
        // Planar picture: lay out components sequentially, each with its own
        // stride and (possibly subsampled) height; all sizes checked for overflow.
        for i in 0..fmt.get_num_comp() {
            let ochr = fmt.get_chromaton(i);
            if ochr.is_none() { continue; }
            let chr = ochr.unwrap();
            offs.push(new_size as usize);
            let stride = chr.get_linesize(width);
            let cur_h = chr.get_height(height);
            let cur_sz = stride.checked_mul(cur_h);
            if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            let new_sz = new_size.checked_add(cur_sz.unwrap());
            if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            new_size = new_sz.unwrap();
            strides.push(stride);
        }
        // Pick the smallest element type that can hold the deepest component.
        if max_depth <= 8 {
            let data: Vec<u8> = vec![0; new_size];
            let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video(buf.into_ref()))
        } else if max_depth <= 16 {
            let data: Vec<u16> = vec![0; new_size];
            let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video16(buf.into_ref()))
        } else {
            let data: Vec<u32> = vec![0; new_size];
            let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video32(buf.into_ref()))
        }
    } else if all_bytealigned || unfit_elem_size {
        // Packed pixels stored as raw bytes, one line = width * element size.
        let elem_sz = fmt.get_elem_size();
        let line_sz = width.checked_mul(elem_sz as usize);
        if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let new_sz = line_sz.unwrap().checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        let data: Vec<u8> = vec![0; new_size];
        strides.push(line_sz.unwrap());
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::VideoPacked(buf.into_ref()))
    } else {
        // Packed pixels that fit a native 16- or 32-bit element, one element
        // per pixel (stride counts elements, not bytes).
        let elem_sz = fmt.get_elem_size();
        let new_sz = width.checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        match elem_sz {
            2 => {
                let data: Vec<u16> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video16(buf.into_ref()))
            },
            4 => {
                let data: Vec<u32> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video32(buf.into_ref()))
            },
            // `unfit_elem_size` above guarantees elem_sz is 2 or 4 here.
            _ => unreachable!(),
        }
    }
}
575
/// Constructs a new audio buffer for the requested format and length.
///
/// Planar or byte-aligned formats get a natively-typed buffer (u8/i16/f32);
/// everything else is allocated as a packed byte buffer with no per-channel
/// offsets.
#[allow(clippy::collapsible_if)]
pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
    let mut offs: Vec<usize> = Vec::new();
    if ainfo.format.is_planar() || ((ainfo.format.get_bits() % 8) == 0) {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = len.unwrap();
        let stride;
        let step;
        if ainfo.format.is_planar() {
            // Planar: each channel occupies a contiguous run of `nsamples`.
            stride = nsamples;
            step = 1;
            for i in 0..ainfo.channels {
                offs.push((i as usize) * stride);
            }
        } else {
            // Interleaved: samples of all channels alternate.
            stride = 1;
            step = ainfo.channels as usize;
            for i in 0..ainfo.channels {
                offs.push(i as usize);
            }
        }
        if ainfo.format.is_float() {
            if ainfo.format.get_bits() == 32 {
                let data: Vec<f32> = vec![0.0; length];
                let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
                Ok(NABufferType::AudioF32(buf))
            } else {
                // NOTE(review): this is really an unsupported-format case but
                // it reuses the `TooLargeDimensions` error kind — confirm.
                Err(AllocatorError::TooLargeDimensions)
            }
        } else {
            if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
                let data: Vec<u8> = vec![0; length];
                let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
                Ok(NABufferType::AudioU8(buf))
            } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
                let data: Vec<i16> = vec![0; length];
                let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
                Ok(NABufferType::AudioI16(buf))
            } else {
                // NOTE(review): same error-kind reuse as above — confirm.
                Err(AllocatorError::TooLargeDimensions)
            }
        }
    } else {
        // Packed (non-byte-aligned) formats: raw byte buffer sized by the
        // format's own packing rules; no per-channel offsets are provided.
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = ainfo.format.get_audio_size(len.unwrap() as u64);
        let data: Vec<u8> = vec![0; length];
        let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0, step: 0 };
        Ok(NABufferType::AudioPacked(buf))
    }
}
629
630 /// Constructs a new buffer for generic data.
631 pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
632 let data: Vec<u8> = vec![0; size];
633 let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
634 Ok(NABufferType::Data(buf))
635 }
636
637 /// Creates a clone of current buffer.
638 pub fn copy_buffer(buf: NABufferType) -> NABufferType {
639 buf.clone()
640 }
641
/// Video frame pool.
///
/// This structure allows codec to effectively reuse old frames instead of allocating and de-allocating frames every time.
/// Caller can also reserve some frames for its own purposes e.g. display queue.
pub struct NAVideoBufferPool<T:Copy> {
    pool: Vec<NAVideoBufferRef<T>>, // allocated frames; a frame with refcount 1 is free
    max_len: usize,                 // base number of frames kept in the pool
    add_len: usize,                 // extra frames reserved for the caller
}
651
652 impl<T:Copy> NAVideoBufferPool<T> {
653 /// Constructs a new `NAVideoBufferPool` instance.
654 pub fn new(max_len: usize) -> Self {
655 Self {
656 pool: Vec::with_capacity(max_len),
657 max_len,
658 add_len: 0,
659 }
660 }
661 /// Sets the number of buffers reserved for the user.
662 pub fn set_dec_bufs(&mut self, add_len: usize) {
663 self.add_len = add_len;
664 }
665 /// Returns an unused buffer from the pool.
666 pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
667 for e in self.pool.iter() {
668 if e.get_num_refs() == 1 {
669 return Some(e.clone());
670 }
671 }
672 None
673 }
674 /// Clones provided frame data into a free pool frame.
675 pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
676 let mut dbuf = self.get_free()?;
677 dbuf.data.copy_from_slice(&rbuf.data);
678 Some(dbuf)
679 }
680 /// Clears the pool from all frames.
681 pub fn reset(&mut self) {
682 self.pool.truncate(0);
683 }
684 }
685
686 impl NAVideoBufferPool<u8> {
687 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
688 ///
689 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
690 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
691 let nbufs = self.max_len + self.add_len - self.pool.len();
692 for _ in 0..nbufs {
693 let vbuf = alloc_video_buffer(vinfo, align)?;
694 if let NABufferType::Video(buf) = vbuf {
695 self.pool.push(buf);
696 } else if let NABufferType::VideoPacked(buf) = vbuf {
697 self.pool.push(buf);
698 } else {
699 return Err(AllocatorError::FormatError);
700 }
701 }
702 Ok(())
703 }
704 }
705
706 impl NAVideoBufferPool<u16> {
707 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
708 ///
709 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
710 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
711 let nbufs = self.max_len + self.add_len - self.pool.len();
712 for _ in 0..nbufs {
713 let vbuf = alloc_video_buffer(vinfo, align)?;
714 if let NABufferType::Video16(buf) = vbuf {
715 self.pool.push(buf);
716 } else {
717 return Err(AllocatorError::FormatError);
718 }
719 }
720 Ok(())
721 }
722 }
723
724 impl NAVideoBufferPool<u32> {
725 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
726 ///
727 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
728 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
729 let nbufs = self.max_len + self.add_len - self.pool.len();
730 for _ in 0..nbufs {
731 let vbuf = alloc_video_buffer(vinfo, align)?;
732 if let NABufferType::Video32(buf) = vbuf {
733 self.pool.push(buf);
734 } else {
735 return Err(AllocatorError::FormatError);
736 }
737 }
738 Ok(())
739 }
740 }
741
/// Information about codec contained in a stream.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    name: &'static str,                 // codec name
    properties: NACodecTypeInfo,        // audio/video stream parameters
    extradata: Option<Arc<Vec<u8>>>,    // optional codec initialisation data
}
750
/// A specialised type for reference-counted `NACodecInfo`.
///
/// Cloning the reference is cheap and shares the same codec description.
pub type NACodecInfoRef = Arc<NACodecInfo>;
753
754 impl NACodecInfo {
755 /// Constructs a new instance of `NACodecInfo`.
756 pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
757 let extradata = match edata {
758 None => None,
759 Some(vec) => Some(Arc::new(vec)),
760 };
761 NACodecInfo { name, properties: p, extradata }
762 }
763 /// Constructs a new reference-counted instance of `NACodecInfo`.
764 pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
765 NACodecInfo { name, properties: p, extradata: edata }
766 }
767 /// Converts current instance into a reference-counted one.
768 pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
769 /// Returns codec information.
770 pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
771 /// Returns additional initialisation data required by the codec.
772 pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
773 if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
774 None
775 }
776 /// Returns codec name.
777 pub fn get_name(&self) -> &'static str { self.name }
778 /// Reports whether it is a video codec.
779 pub fn is_video(&self) -> bool {
780 if let NACodecTypeInfo::Video(_) = self.properties { return true; }
781 false
782 }
783 /// Reports whether it is an audio codec.
784 pub fn is_audio(&self) -> bool {
785 if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
786 false
787 }
788 /// Constructs a new empty reference-counted instance of `NACodecInfo`.
789 pub fn new_dummy() -> Arc<Self> {
790 Arc::new(DUMMY_CODEC_INFO)
791 }
792 /// Updates codec infomation.
793 pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
794 Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
795 }
796 }
797
impl Default for NACodecInfo {
    /// Returns the empty `DUMMY_CODEC_INFO` description.
    fn default() -> Self { DUMMY_CODEC_INFO }
}
801
802 impl fmt::Display for NACodecInfo {
803 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
804 let edata = match self.extradata.clone() {
805 None => "no extradata".to_string(),
806 Some(v) => format!("{} byte(s) of extradata", v.len()),
807 };
808 write!(f, "{}: {} {}", self.name, self.properties, edata)
809 }
810 }
811
/// Default empty codec information (name `"none"`, no stream properties).
pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };
817
/// A list of accepted option values.
#[derive(Debug,Clone)]
pub enum NAValue {
    /// Empty value.
    None,
    /// Integer value.
    Int(i32),
    /// Long integer value.
    Long(i64),
    /// String value.
    String(String),
    /// Binary data value (reference-counted).
    Data(Arc<Vec<u8>>),
}
832
/// A list of recognized frame types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    /// Intra frame type.
    I,
    /// Inter frame type.
    P,
    /// Bidirectionally predicted frame.
    B,
    /// Skip frame.
    ///
    /// When such frame is encountered then last frame should be used again if it is needed.
    Skip,
    /// Some other frame type.
    Other,
}

impl fmt::Display for FrameType {
    /// Prints the conventional one-letter tag (`skip` for skip frames).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let tag = match *self {
            FrameType::I    => "I",
            FrameType::P    => "P",
            FrameType::B    => "B",
            FrameType::Skip => "skip",
            FrameType::Other => "x",
        };
        f.write_str(tag)
    }
}
862
/// Timestamp information.
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    /// Presentation timestamp.
    pub pts: Option<u64>,
    /// Decode timestamp.
    pub dts: Option<u64>,
    /// Duration (in timebase units).
    pub duration: Option<u64>,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}

impl NATimeInfo {
    /// Constructs a new `NATimeInfo` instance.
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    /// Returns presentation timestamp.
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    /// Returns decoding timestamp.
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    /// Returns duration.
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    /// Sets new presentation timestamp.
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    /// Sets new decoding timestamp.
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    /// Sets new duration.
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }

    /// Converts time in given scale into timestamp in given base.
    ///
    /// Fast path computes `time * tb_num / base / tb_den`; when the
    /// multiplication would overflow `u64` it divides by `base` first,
    /// trading precision for range.
    ///
    /// Fixed: the previous overflow fallback first retried the *exact same*
    /// `time.checked_mul(tb_num)` call, which could only fail again — that
    /// unreachable branch has been removed without changing any result.
    ///
    /// NOTE(review): this multiplies by `tb_num` and divides by `tb_den`,
    /// which is not the algebraic inverse of `ts_to_time` below
    /// (`ts * base * tb_num / tb_den`) — confirm against callers whether the
    /// asymmetry is intentional.
    pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        if let Some(tmp) = time.checked_mul(tb_num) {
            tmp / base / tb_den
        } else {
            // Overflow: coarse conversion, dividing by `base` up front.
            let coarse = time / base;
            if let Some(tmp) = coarse.checked_mul(tb_num) {
                tmp / tb_den
            } else {
                (coarse / tb_den) * tb_num
            }
        }
    }
    /// Converts timestamp in given base into time in given scale.
    ///
    /// Computes `ts * base * tb_num / tb_den`, falling back to coarser
    /// orderings of the same expression when intermediates overflow `u64`.
    pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        if let Some(tmp) = ts.checked_mul(base) {
            if let Some(tmp2) = tmp.checked_mul(tb_num) {
                tmp2 / tb_den
            } else {
                (tmp / tb_den) * tb_num
            }
        } else if let Some(tmp) = ts.checked_mul(tb_num) {
            (tmp / tb_den) * base
        } else {
            (ts / tb_den) * base * tb_num
        }
    }
}
940
/// Decoded frame information.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    /// Frame timestamp.
    pub ts: NATimeInfo,
    /// Frame ID.
    pub id: i64,
    /// Decoded frame data (see `get_buffer()`).
    buffer: NABufferType,
    /// Codec information for the stream this frame belongs to.
    info: NACodecInfoRef,
    /// Frame type.
    pub frame_type: FrameType,
    /// Keyframe flag.
    pub key: bool,
    // options: HashMap<String, NAValue>,
}
957
/// A specialised type for reference-counted `NAFrame`.
///
/// Create one via [`NAFrame::into_ref`].
pub type NAFrameRef = Arc<NAFrame>;
960
961 fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
962 let chromaton = info.get_format().get_chromaton(idx);
963 if chromaton.is_none() { return (0, 0); }
964 let (hs, vs) = chromaton.unwrap().get_subsampling();
965 let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
966 let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
967 (w, h)
968 }
969
970 impl NAFrame {
971 /// Constructs a new `NAFrame` instance.
972 pub fn new(ts: NATimeInfo,
973 ftype: FrameType,
974 keyframe: bool,
975 info: NACodecInfoRef,
976 /*options: HashMap<String, NAValue>,*/
977 buffer: NABufferType) -> Self {
978 NAFrame { ts, id: 0, buffer, info, frame_type: ftype, key: keyframe/*, options*/ }
979 }
980 /// Returns frame format information.
981 pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
982 /// Returns frame type.
983 pub fn get_frame_type(&self) -> FrameType { self.frame_type }
984 /// Reports whether the frame is a keyframe.
985 pub fn is_keyframe(&self) -> bool { self.key }
986 /// Sets new frame type.
987 pub fn set_frame_type(&mut self, ftype: FrameType) { self.frame_type = ftype; }
988 /// Sets keyframe flag.
989 pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
990 /// Returns frame timestamp.
991 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
992 /// Returns frame presentation time.
993 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
994 /// Returns frame decoding time.
995 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
996 /// Returns picture ID.
997 pub fn get_id(&self) -> i64 { self.id }
998 /// Returns frame display duration.
999 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1000 /// Sets new presentation timestamp.
1001 pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
1002 /// Sets new decoding timestamp.
1003 pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
1004 /// Sets new picture ID.
1005 pub fn set_id(&mut self, id: i64) { self.id = id; }
1006 /// Sets new duration.
1007 pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }
1008
1009 /// Returns a reference to the frame data.
1010 pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }
1011
1012 /// Converts current instance into a reference-counted one.
1013 pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
1014
1015 /// Creates new frame with metadata from `NAPacket`.
1016 pub fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
1017 NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, /*HashMap::new(),*/ buf)
1018 }
1019 }
1020
1021 impl fmt::Display for NAFrame {
1022 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1023 let mut ostr = format!("frame type {}", self.frame_type);
1024 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1025 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1026 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1027 if self.key { ostr = format!("{} kf", ostr); }
1028 write!(f, "[{}]", ostr)
1029 }
1030 }
1031
/// A list of possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// Video stream.
    Video,
    /// Audio stream.
    Audio,
    /// Subtitles.
    Subtitles,
    /// Any data stream (or might be an unrecognized audio/video stream).
    Data,
    /// Nonexistent stream.
    None,
}

impl fmt::Display for StreamType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Resolve the variant name first, then emit it with a single call.
        let name = match *self {
            StreamType::Video     => "Video",
            StreamType::Audio     => "Audio",
            StreamType::Subtitles => "Subtitles",
            StreamType::Data      => "Data",
            StreamType::None      => "-",
        };
        f.write_str(name)
    }
}
1059
/// Stream data.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    /// Stream media type (see `get_media_type()`).
    media_type: StreamType,
    /// Stream ID.
    pub id: u32,
    /// Stream number assigned by the demuxer (see `set_num()`).
    num: usize,
    /// Codec information for this stream.
    info: NACodecInfoRef,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}

/// A specialised reference-counted `NAStream` type.
pub type NAStreamRef = Arc<NAStream>;
1077
/// Downscales the timebase by its greatest common denominator.
///
/// Returns the input unchanged when the numerator is zero.
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    // Fast path: denominator is an exact multiple of the numerator
    // (also covers tb_den == 0, keeping the original behaviour).
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    // Modulo-based Euclidean GCD: O(log min(num, den)) steps instead of the
    // original subtraction loop, which is O(max/min) in the worst case
    // (e.g. reduce_timebase(1, 1_000_000_000)). Results are identical.
    let mut a = tb_num;
    let mut b = tb_den;
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }

    (tb_num / a, tb_den / a)
}
1093
1094 impl NAStream {
1095 /// Constructs a new `NAStream` instance.
1096 pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
1097 let (n, d) = reduce_timebase(tb_num, tb_den);
1098 NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
1099 }
1100 /// Returns stream id.
1101 pub fn get_id(&self) -> u32 { self.id }
1102 /// Returns stream type.
1103 pub fn get_media_type(&self) -> StreamType { self.media_type }
1104 /// Returns stream number assigned by demuxer.
1105 pub fn get_num(&self) -> usize { self.num }
1106 /// Sets stream number.
1107 pub fn set_num(&mut self, num: usize) { self.num = num; }
1108 /// Returns codec information.
1109 pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
1110 /// Returns stream timebase.
1111 pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
1112 /// Sets new stream timebase.
1113 pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
1114 let (n, d) = reduce_timebase(tb_num, tb_den);
1115 self.tb_num = n;
1116 self.tb_den = d;
1117 }
1118 /// Converts current instance into a reference-counted one.
1119 pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
1120 }
1121
1122 impl fmt::Display for NAStream {
1123 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1124 write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
1125 }
1126 }
1127
/// Packet with compressed data.
#[allow(dead_code)]
pub struct NAPacket {
    /// Stream this packet belongs to (see `get_stream()`).
    stream: NAStreamRef,
    /// Packet timestamp.
    pub ts: NATimeInfo,
    /// Reference-counted compressed payload (see `get_buffer()`).
    buffer: NABufferRef<Vec<u8>>,
    /// Keyframe flag.
    pub keyframe: bool,
    // options: HashMap<String, NAValue<'a>>,
}
1139
1140 impl NAPacket {
1141 /// Constructs a new `NAPacket` instance.
1142 pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
1143 // let mut vec: Vec<u8> = Vec::new();
1144 // vec.resize(size, 0);
1145 NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) }
1146 }
1147 /// Returns information about the stream packet belongs to.
1148 pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
1149 /// Returns packet timestamp.
1150 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
1151 /// Returns packet presentation timestamp.
1152 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
1153 /// Returns packet decoding timestamp.
1154 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
1155 /// Returns packet duration.
1156 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1157 /// Reports whether this is a keyframe packet.
1158 pub fn is_keyframe(&self) -> bool { self.keyframe }
1159 /// Returns a reference to packet data.
1160 pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
1161 }
1162
impl Drop for NAPacket {
    // NOTE(review): deliberately a no-op — presumably kept as a hook for
    // future cleanup. Confirm intent before removing: even an empty explicit
    // Drop impl changes language semantics (e.g. the struct can no longer be
    // destructured by move).
    fn drop(&mut self) {}
}
1166
1167 impl fmt::Display for NAPacket {
1168 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1169 let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len());
1170 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1171 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1172 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1173 if self.keyframe { ostr = format!("{} kf", ostr); }
1174 ostr += "]";
1175 write!(f, "{}", ostr)
1176 }
1177 }