add declared bitdepth to NAVideoInfo
[nihav.git] / nihav-core / src / frame.rs
1 //! Packets and decoded frames functionality.
2 use std::cmp::max;
3 //use std::collections::HashMap;
4 use std::fmt;
5 pub use std::sync::Arc;
6 pub use crate::formats::*;
7 pub use crate::refs::*;
8
/// Audio stream information.
///
/// Describes the raw parameters of an audio stream independently of any codec.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    /// Sample rate in Hz.
    pub sample_rate: u32,
    /// Number of channels.
    pub channels: u8,
    /// Audio sample format.
    pub format: NASoniton,
    /// Length of one audio block in samples.
    pub block_len: usize,
}
22
23 impl NAAudioInfo {
24 /// Constructs a new `NAAudioInfo` instance.
25 pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
26 NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
27 }
28 /// Returns audio sample rate.
29 pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
30 /// Returns the number of channels.
31 pub fn get_channels(&self) -> u8 { self.channels }
32 /// Returns sample format.
33 pub fn get_format(&self) -> NASoniton { self.format }
34 /// Returns one audio block duration in samples.
35 pub fn get_block_len(&self) -> usize { self.block_len }
36 }
37
38 impl fmt::Display for NAAudioInfo {
39 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
40 write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
41 }
42 }
43
/// Video stream information.
///
/// Describes the picture geometry, orientation and pixel layout of a video stream.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    /// Picture width.
    pub width: usize,
    /// Picture height.
    pub height: usize,
    /// Picture is stored downside up.
    pub flipped: bool,
    /// Picture pixel format.
    pub format: NAPixelFormaton,
    /// Declared bits per sample (initialised from the pixel format's total depth by `new()`).
    pub bits: u8,
}
59
60 impl NAVideoInfo {
61 /// Constructs a new `NAVideoInfo` instance.
62 pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
63 let bits = fmt.get_total_depth();
64 NAVideoInfo { width: w, height: h, flipped: flip, format: fmt, bits }
65 }
66 /// Returns picture width.
67 pub fn get_width(&self) -> usize { self.width as usize }
68 /// Returns picture height.
69 pub fn get_height(&self) -> usize { self.height as usize }
70 /// Returns picture orientation.
71 pub fn is_flipped(&self) -> bool { self.flipped }
72 /// Returns picture pixel format.
73 pub fn get_format(&self) -> NAPixelFormaton { self.format }
74 /// Sets new picture width.
75 pub fn set_width(&mut self, w: usize) { self.width = w; }
76 /// Sets new picture height.
77 pub fn set_height(&mut self, h: usize) { self.height = h; }
78 }
79
80 impl fmt::Display for NAVideoInfo {
81 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
82 write!(f, "{}x{}", self.width, self.height)
83 }
84 }
85
/// A list of possible stream information types.
///
/// Wraps either audio or video parameters (or nothing) for a stream.
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    /// No codec present.
    None,
    /// Audio codec information.
    Audio(NAAudioInfo),
    /// Video codec information.
    Video(NAVideoInfo),
}
96
97 impl NACodecTypeInfo {
98 /// Returns video stream information.
99 pub fn get_video_info(&self) -> Option<NAVideoInfo> {
100 match *self {
101 NACodecTypeInfo::Video(vinfo) => Some(vinfo),
102 _ => None,
103 }
104 }
105 /// Returns audio stream information.
106 pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
107 match *self {
108 NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
109 _ => None,
110 }
111 }
112 /// Reports whether the current stream is video stream.
113 pub fn is_video(&self) -> bool {
114 match *self {
115 NACodecTypeInfo::Video(_) => true,
116 _ => false,
117 }
118 }
119 /// Reports whether the current stream is audio stream.
120 pub fn is_audio(&self) -> bool {
121 match *self {
122 NACodecTypeInfo::Audio(_) => true,
123 _ => false,
124 }
125 }
126 }
127
128 impl fmt::Display for NACodecTypeInfo {
129 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
130 let ret = match *self {
131 NACodecTypeInfo::None => "".to_string(),
132 NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
133 NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
134 };
135 write!(f, "{}", ret)
136 }
137 }
138
/// Decoded video frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of image with several components those components are stored sequentially and can be accessed in the buffer starting at corresponding component offset.
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    /// Picture parameters this buffer was allocated for.
    info: NAVideoInfo,
    /// Reference-counted backing storage shared by all components.
    data: NABufferRef<Vec<T>>,
    /// Start offset of each component inside `data`.
    offs: Vec<usize>,
    /// Line stride of each component, in elements of `T`.
    strides: Vec<usize>,
}
150
151 impl<T: Clone> NAVideoBuffer<T> {
152 /// Returns the component offset (0 for all unavailable offsets).
153 pub fn get_offset(&self, idx: usize) -> usize {
154 if idx >= self.offs.len() { 0 }
155 else { self.offs[idx] }
156 }
157 /// Returns picture info.
158 pub fn get_info(&self) -> NAVideoInfo { self.info }
159 /// Returns an immutable reference to the data.
160 pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
161 /// Returns a mutable reference to the data.
162 pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
163 /// Returns the number of components in picture format.
164 pub fn get_num_components(&self) -> usize { self.offs.len() }
165 /// Creates a copy of current `NAVideoBuffer`.
166 pub fn copy_buffer(&mut self) -> Self {
167 let mut data: Vec<T> = Vec::with_capacity(self.data.len());
168 data.clone_from(self.data.as_ref());
169 let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
170 offs.clone_from(&self.offs);
171 let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
172 strides.clone_from(&self.strides);
173 NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
174 }
175 /// Returns stride (distance between subsequent lines) for the requested component.
176 pub fn get_stride(&self, idx: usize) -> usize {
177 if idx >= self.strides.len() { return 0; }
178 self.strides[idx]
179 }
180 /// Returns requested component dimensions.
181 pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
182 get_plane_size(&self.info, idx)
183 }
184 /// Converts current instance into buffer reference.
185 pub fn into_ref(self) -> NABufferRef<Self> {
186 NABufferRef::new(self)
187 }
188
189 fn print_contents(&self, datatype: &str) {
190 println!("{} video buffer size {}", datatype, self.data.len());
191 println!(" format {}", self.info);
192 print!(" offsets:");
193 for off in self.offs.iter() {
194 print!(" {}", *off);
195 }
196 println!();
197 print!(" strides:");
198 for stride in self.strides.iter() {
199 print!(" {}", *stride);
200 }
201 println!();
202 }
203 }
204
/// A specialised type for reference-counted `NAVideoBuffer`.
///
/// Cloning this reference shares the underlying frame instead of copying it.
pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;
207
/// Decoded audio frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of planar audio samples for each channel are stored sequentially and can be accessed in the buffer starting at corresponding channel offset.
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    /// Audio stream parameters this buffer was allocated for.
    info: NAAudioInfo,
    /// Reference-counted backing storage for all channels.
    data: NABufferRef<Vec<T>>,
    /// Start offset of each channel inside `data`.
    offs: Vec<usize>,
    /// Distance between the starts of two consecutive channels.
    stride: usize,
    /// Distance between two consecutive samples of one channel.
    step: usize,
    /// Channel map describing speaker positions.
    chmap: NAChannelMap,
    /// Frame length in samples.
    len: usize,
}
222
223 impl<T: Clone> NAAudioBuffer<T> {
224 /// Returns the start position of requested channel data.
225 pub fn get_offset(&self, idx: usize) -> usize {
226 if idx >= self.offs.len() { 0 }
227 else { self.offs[idx] }
228 }
229 /// Returns the distance between the start of one channel and the next one.
230 pub fn get_stride(&self) -> usize { self.stride }
231 /// Returns the distance between the samples in one channel.
232 pub fn get_step(&self) -> usize { self.step }
233 /// Returns audio format information.
234 pub fn get_info(&self) -> NAAudioInfo { self.info }
235 /// Returns channel map.
236 pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap }
237 /// Returns an immutable reference to the data.
238 pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
239 /// Returns reference to the data.
240 pub fn get_data_ref(&self) -> NABufferRef<Vec<T>> { self.data.clone() }
241 /// Returns a mutable reference to the data.
242 pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
243 /// Clones current `NAAudioBuffer` into a new one.
244 pub fn copy_buffer(&mut self) -> Self {
245 let mut data: Vec<T> = Vec::with_capacity(self.data.len());
246 data.clone_from(self.data.as_ref());
247 let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
248 offs.clone_from(&self.offs);
249 NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride, step: self.step }
250 }
251 /// Return the length of frame in samples.
252 pub fn get_length(&self) -> usize { self.len }
253
254 fn print_contents(&self, datatype: &str) {
255 println!("Audio buffer with {} data, stride {}, step {}", datatype, self.stride, self.step);
256 println!(" format {}", self.info);
257 println!(" channel map {}", self.chmap);
258 print!(" offsets:");
259 for off in self.offs.iter() {
260 print!(" {}", *off);
261 }
262 println!();
263 }
264 }
265
266 impl NAAudioBuffer<u8> {
267 /// Constructs a new `NAAudioBuffer` instance.
268 pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
269 let len = data.len();
270 NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0, step: 0 }
271 }
272 }
273
/// A list of possible decoded frame types.
///
/// The variant determines both the media type and the native element width of the storage.
#[derive(Clone)]
pub enum NABufferType {
    /// 8-bit video buffer.
    Video      (NAVideoBufferRef<u8>),
    /// 16-bit video buffer (i.e. every component or packed pixel fits into 16 bits).
    Video16    (NAVideoBufferRef<u16>),
    /// 32-bit video buffer (i.e. every component or packed pixel fits into 32 bits).
    Video32    (NAVideoBufferRef<u32>),
    /// Packed video buffer.
    VideoPacked(NAVideoBufferRef<u8>),
    /// Audio buffer with 8-bit unsigned integer audio.
    AudioU8    (NAAudioBuffer<u8>),
    /// Audio buffer with 16-bit signed integer audio.
    AudioI16   (NAAudioBuffer<i16>),
    /// Audio buffer with 32-bit signed integer audio.
    AudioI32   (NAAudioBuffer<i32>),
    /// Audio buffer with 32-bit floating point audio.
    AudioF32   (NAAudioBuffer<f32>),
    /// Packed audio buffer.
    AudioPacked(NAAudioBuffer<u8>),
    /// Buffer with generic data (e.g. subtitles).
    Data       (NABufferRef<Vec<u8>>),
    /// No data present.
    None,
}
300
impl NABufferType {
    /// Returns the offset to the requested component or channel.
    ///
    /// Variants without components (`Data`, `None`) always report 0.
    pub fn get_offset(&self, idx: usize) -> usize {
        match *self {
            NABufferType::Video(ref vb) => vb.get_offset(idx),
            NABufferType::Video16(ref vb) => vb.get_offset(idx),
            NABufferType::Video32(ref vb) => vb.get_offset(idx),
            NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
            NABufferType::AudioU8(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI16(ref ab) => ab.get_offset(idx),
            NABufferType::AudioI32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioF32(ref ab) => ab.get_offset(idx),
            NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
            _ => 0,
        }
    }
    /// Returns information for video frames (`None` for non-video variants).
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.get_info()),
            NABufferType::Video16(ref vb) => Some(vb.get_info()),
            NABufferType::Video32(ref vb) => Some(vb.get_info()),
            NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
            _ => None,
        }
    }
    /// Returns reference to 8-bit (or packed) video buffer.
    pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
        match *self {
            NABufferType::Video(ref vb) => Some(vb.clone()),
            NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    /// Returns reference to 16-bit video buffer.
    pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
        match *self {
            NABufferType::Video16(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    /// Returns reference to 32-bit video buffer.
    pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
        match *self {
            NABufferType::Video32(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    /// Returns information for audio frames (`None` for non-audio variants).
    pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.get_info()),
            NABufferType::AudioI16(ref ab) => Some(ab.get_info()),
            NABufferType::AudioI32(ref ab) => Some(ab.get_info()),
            NABufferType::AudioF32(ref ab) => Some(ab.get_info()),
            NABufferType::AudioPacked(ref ab) => Some(ab.get_info()),
            _ => None,
        }
    }
    /// Returns audio channel map (`None` for non-audio variants).
    pub fn get_chmap(&self) -> Option<&NAChannelMap> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioI16(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioI32(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioF32(ref ab) => Some(ab.get_chmap()),
            NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()),
            _ => None,
        }
    }
    /// Returns audio frame duration in samples (0 for non-audio variants).
    pub fn get_audio_length(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab) => ab.get_length(),
            NABufferType::AudioI16(ref ab) => ab.get_length(),
            NABufferType::AudioI32(ref ab) => ab.get_length(),
            NABufferType::AudioF32(ref ab) => ab.get_length(),
            NABufferType::AudioPacked(ref ab) => ab.get_length(),
            _ => 0,
        }
    }
    /// Returns the distance between starts of two channels (0 for non-audio variants).
    pub fn get_audio_stride(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab) => ab.get_stride(),
            NABufferType::AudioI16(ref ab) => ab.get_stride(),
            NABufferType::AudioI32(ref ab) => ab.get_stride(),
            NABufferType::AudioF32(ref ab) => ab.get_stride(),
            NABufferType::AudioPacked(ref ab) => ab.get_stride(),
            _ => 0,
        }
    }
    /// Returns the distance between two samples in one channel (0 for non-audio variants).
    pub fn get_audio_step(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab) => ab.get_step(),
            NABufferType::AudioI16(ref ab) => ab.get_step(),
            NABufferType::AudioI32(ref ab) => ab.get_step(),
            NABufferType::AudioF32(ref ab) => ab.get_step(),
            NABufferType::AudioPacked(ref ab) => ab.get_step(),
            _ => 0,
        }
    }
    /// Returns reference to 8-bit (or packed) audio buffer.
    pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
        match *self {
            NABufferType::AudioU8(ref ab) => Some(ab.clone()),
            NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Returns reference to 16-bit audio buffer.
    pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
        match *self {
            NABufferType::AudioI16(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Returns reference to 32-bit integer audio buffer.
    pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
        match *self {
            NABufferType::AudioI32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Returns reference to 32-bit floating point audio buffer.
    pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
        match *self {
            NABufferType::AudioF32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Prints internal buffer layout.
    pub fn print_buffer_metadata(&self) {
        match *self {
            NABufferType::Video(ref buf) => buf.print_contents("8-bit"),
            NABufferType::Video16(ref buf) => buf.print_contents("16-bit"),
            NABufferType::Video32(ref buf) => buf.print_contents("32-bit"),
            NABufferType::VideoPacked(ref buf) => buf.print_contents("packed"),
            NABufferType::AudioU8(ref buf) => buf.print_contents("8-bit unsigned integer"),
            NABufferType::AudioI16(ref buf) => buf.print_contents("16-bit integer"),
            NABufferType::AudioI32(ref buf) => buf.print_contents("32-bit integer"),
            NABufferType::AudioF32(ref buf) => buf.print_contents("32-bit float"),
            NABufferType::AudioPacked(ref buf) => buf.print_contents("packed"),
            NABufferType::Data(ref buf) => { println!("Data buffer, len = {}", buf.len()); },
            NABufferType::None => { println!("No buffer"); },
        };
    }
}
450
/// Maximum number of picture components a simple frame view can describe.
const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
/// Simplified decoded frame data.
///
/// A flat, borrowed view over a `NAVideoBuffer` exposing the layout as fixed-size
/// arrays (up to `NA_SIMPLE_VFRAME_COMPONENTS` components) plus a mutable data slice.
pub struct NASimpleVideoFrame<'a, T: Copy> {
    /// Widths of each picture component.
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Heights of each picture component.
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Orientation (upside-down or downside-up) flag.
    pub flip: bool,
    /// Strides for each component.
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Start of each component.
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Number of components.
    pub components: usize,
    /// Pointer to the picture pixel data.
    pub data: &'a mut [T],
}
469
470 impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
471 /// Constructs a new instance of `NASimpleVideoFrame` from `NAVideoBuffer`.
472 pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
473 let vinfo = vbuf.get_info();
474 let components = vinfo.format.components as usize;
475 if components > NA_SIMPLE_VFRAME_COMPONENTS {
476 return None;
477 }
478 let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
479 let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
480 let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
481 let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
482 for comp in 0..components {
483 let (width, height) = vbuf.get_dimensions(comp);
484 w[comp] = width;
485 h[comp] = height;
486 s[comp] = vbuf.get_stride(comp);
487 o[comp] = vbuf.get_offset(comp);
488 }
489 let flip = vinfo.flipped;
490 Some(NASimpleVideoFrame {
491 width: w,
492 height: h,
493 flip,
494 stride: s,
495 offset: o,
496 components,
497 data: vbuf.data.as_mut_slice(),
498 })
499 }
500 }
501
/// A list of possible frame allocator errors.
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    /// Requested picture dimensions are too large (size computation overflowed).
    TooLargeDimensions,
    /// Invalid input format.
    FormatError,
}
510
511 /// Constructs a new video buffer with requested format.
512 ///
513 /// `align` is power of two alignment for image. E.g. the value of 5 means that frame dimensions will be padded to be multiple of 32.
514 pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
515 let fmt = &vinfo.format;
516 let mut new_size: usize = 0;
517 let mut offs: Vec<usize> = Vec::new();
518 let mut strides: Vec<usize> = Vec::new();
519
520 for i in 0..fmt.get_num_comp() {
521 if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
522 }
523
524 let align_mod = ((1 << align) as usize) - 1;
525 let width = ((vinfo.width as usize) + align_mod) & !align_mod;
526 let height = ((vinfo.height as usize) + align_mod) & !align_mod;
527 let mut max_depth = 0;
528 let mut all_packed = true;
529 let mut all_bytealigned = true;
530 for i in 0..fmt.get_num_comp() {
531 let ochr = fmt.get_chromaton(i);
532 if ochr.is_none() { continue; }
533 let chr = ochr.unwrap();
534 if !chr.is_packed() {
535 all_packed = false;
536 } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
537 all_bytealigned = false;
538 }
539 max_depth = max(max_depth, chr.get_depth());
540 }
541 let unfit_elem_size = match fmt.get_elem_size() {
542 2 | 4 => false,
543 _ => true,
544 };
545
546 //todo semi-packed like NV12
547 if fmt.is_paletted() {
548 //todo various-sized palettes?
549 let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
550 let pic_sz = stride.checked_mul(height);
551 if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
552 let pal_size = 256 * (fmt.get_elem_size() as usize);
553 let new_size = pic_sz.unwrap().checked_add(pal_size);
554 if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
555 offs.push(0);
556 offs.push(stride * height);
557 strides.push(stride);
558 let data: Vec<u8> = vec![0; new_size.unwrap()];
559 let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
560 Ok(NABufferType::Video(buf.into_ref()))
561 } else if !all_packed {
562 for i in 0..fmt.get_num_comp() {
563 let ochr = fmt.get_chromaton(i);
564 if ochr.is_none() { continue; }
565 let chr = ochr.unwrap();
566 offs.push(new_size as usize);
567 let stride = chr.get_linesize(width);
568 let cur_h = chr.get_height(height);
569 let cur_sz = stride.checked_mul(cur_h);
570 if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
571 let new_sz = new_size.checked_add(cur_sz.unwrap());
572 if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
573 new_size = new_sz.unwrap();
574 strides.push(stride);
575 }
576 if max_depth <= 8 {
577 let data: Vec<u8> = vec![0; new_size];
578 let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
579 Ok(NABufferType::Video(buf.into_ref()))
580 } else if max_depth <= 16 {
581 let data: Vec<u16> = vec![0; new_size];
582 let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
583 Ok(NABufferType::Video16(buf.into_ref()))
584 } else {
585 let data: Vec<u32> = vec![0; new_size];
586 let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
587 Ok(NABufferType::Video32(buf.into_ref()))
588 }
589 } else if all_bytealigned || unfit_elem_size {
590 let elem_sz = fmt.get_elem_size();
591 let line_sz = width.checked_mul(elem_sz as usize);
592 if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
593 let new_sz = line_sz.unwrap().checked_mul(height);
594 if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
595 new_size = new_sz.unwrap();
596 let data: Vec<u8> = vec![0; new_size];
597 strides.push(line_sz.unwrap());
598 let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
599 Ok(NABufferType::VideoPacked(buf.into_ref()))
600 } else {
601 let elem_sz = fmt.get_elem_size();
602 let new_sz = width.checked_mul(height);
603 if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
604 new_size = new_sz.unwrap();
605 match elem_sz {
606 2 => {
607 let data: Vec<u16> = vec![0; new_size];
608 strides.push(width);
609 let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
610 Ok(NABufferType::Video16(buf.into_ref()))
611 },
612 4 => {
613 let data: Vec<u32> = vec![0; new_size];
614 strides.push(width);
615 let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
616 Ok(NABufferType::Video32(buf.into_ref()))
617 },
618 _ => unreachable!(),
619 }
620 }
621 }
622
623 /// Constructs a new audio buffer for the requested format and length.
624 #[allow(clippy::collapsible_if)]
625 pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
626 let mut offs: Vec<usize> = Vec::new();
627 if ainfo.format.is_planar() || ((ainfo.format.get_bits() % 8) == 0) {
628 let len = nsamples.checked_mul(ainfo.channels as usize);
629 if len == None { return Err(AllocatorError::TooLargeDimensions); }
630 let length = len.unwrap();
631 let stride;
632 let step;
633 if ainfo.format.is_planar() {
634 stride = nsamples;
635 step = 1;
636 for i in 0..ainfo.channels {
637 offs.push((i as usize) * stride);
638 }
639 } else {
640 stride = 1;
641 step = ainfo.channels as usize;
642 for i in 0..ainfo.channels {
643 offs.push(i as usize);
644 }
645 }
646 if ainfo.format.is_float() {
647 if ainfo.format.get_bits() == 32 {
648 let data: Vec<f32> = vec![0.0; length];
649 let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
650 Ok(NABufferType::AudioF32(buf))
651 } else {
652 Err(AllocatorError::TooLargeDimensions)
653 }
654 } else {
655 if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
656 let data: Vec<u8> = vec![0; length];
657 let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
658 Ok(NABufferType::AudioU8(buf))
659 } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
660 let data: Vec<i16> = vec![0; length];
661 let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
662 Ok(NABufferType::AudioI16(buf))
663 } else {
664 Err(AllocatorError::TooLargeDimensions)
665 }
666 }
667 } else {
668 let len = nsamples.checked_mul(ainfo.channels as usize);
669 if len == None { return Err(AllocatorError::TooLargeDimensions); }
670 let length = ainfo.format.get_audio_size(len.unwrap() as u64);
671 let data: Vec<u8> = vec![0; length];
672 let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0, step: 0 };
673 Ok(NABufferType::AudioPacked(buf))
674 }
675 }
676
677 /// Constructs a new buffer for generic data.
678 pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
679 let data: Vec<u8> = vec![0; size];
680 let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
681 Ok(NABufferType::Data(buf))
682 }
683
/// Creates a clone of current buffer.
///
/// NOTE(review): this relies on `NABufferType::clone()`, so the underlying
/// pixel/sample data is presumably shared via the reference-counted
/// `NABufferRef` rather than deep-copied — confirm against `NABufferRef`.
pub fn copy_buffer(buf: NABufferType) -> NABufferType {
    buf.clone()
}
688
/// Video frame pool.
///
/// This structure allows codec to effectively reuse old frames instead of allocating and de-allocating frames every time.
/// Caller can also reserve some frames for its own purposes e.g. display queue.
pub struct NAVideoBufferPool<T:Copy> {
    /// Allocated frames; a frame with a single reference is considered free.
    pool: Vec<NAVideoBufferRef<T>>,
    /// Number of frames the pool itself should hold.
    max_len: usize,
    /// Additional frames reserved for the caller (see `set_dec_bufs`).
    add_len: usize,
}
698
699 impl<T:Copy> NAVideoBufferPool<T> {
700 /// Constructs a new `NAVideoBufferPool` instance.
701 pub fn new(max_len: usize) -> Self {
702 Self {
703 pool: Vec::with_capacity(max_len),
704 max_len,
705 add_len: 0,
706 }
707 }
708 /// Sets the number of buffers reserved for the user.
709 pub fn set_dec_bufs(&mut self, add_len: usize) {
710 self.add_len = add_len;
711 }
712 /// Returns an unused buffer from the pool.
713 pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
714 for e in self.pool.iter() {
715 if e.get_num_refs() == 1 {
716 return Some(e.clone());
717 }
718 }
719 None
720 }
721 /// Clones provided frame data into a free pool frame.
722 pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
723 let mut dbuf = self.get_free()?;
724 dbuf.data.copy_from_slice(&rbuf.data);
725 Some(dbuf)
726 }
727 /// Clears the pool from all frames.
728 pub fn reset(&mut self) {
729 self.pool.truncate(0);
730 }
731 }
732
733 impl NAVideoBufferPool<u8> {
734 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
735 ///
736 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
737 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
738 let nbufs = self.max_len + self.add_len - self.pool.len();
739 for _ in 0..nbufs {
740 let vbuf = alloc_video_buffer(vinfo, align)?;
741 if let NABufferType::Video(buf) = vbuf {
742 self.pool.push(buf);
743 } else if let NABufferType::VideoPacked(buf) = vbuf {
744 self.pool.push(buf);
745 } else {
746 return Err(AllocatorError::FormatError);
747 }
748 }
749 Ok(())
750 }
751 }
752
753 impl NAVideoBufferPool<u16> {
754 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
755 ///
756 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
757 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
758 let nbufs = self.max_len + self.add_len - self.pool.len();
759 for _ in 0..nbufs {
760 let vbuf = alloc_video_buffer(vinfo, align)?;
761 if let NABufferType::Video16(buf) = vbuf {
762 self.pool.push(buf);
763 } else {
764 return Err(AllocatorError::FormatError);
765 }
766 }
767 Ok(())
768 }
769 }
770
771 impl NAVideoBufferPool<u32> {
772 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
773 ///
774 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
775 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
776 let nbufs = self.max_len + self.add_len - self.pool.len();
777 for _ in 0..nbufs {
778 let vbuf = alloc_video_buffer(vinfo, align)?;
779 if let NABufferType::Video32(buf) = vbuf {
780 self.pool.push(buf);
781 } else {
782 return Err(AllocatorError::FormatError);
783 }
784 }
785 Ok(())
786 }
787 }
788
/// Information about codec contained in a stream.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    /// Codec name.
    name: &'static str,
    /// Stream type information (audio/video/none).
    properties: NACodecTypeInfo,
    /// Optional codec-specific initialisation data.
    extradata: Option<Arc<Vec<u8>>>,
}
797
/// A specialised type for reference-counted `NACodecInfo`.
///
/// Cloning this reference shares the same codec information.
pub type NACodecInfoRef = Arc<NACodecInfo>;
800
801 impl NACodecInfo {
802 /// Constructs a new instance of `NACodecInfo`.
803 pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
804 let extradata = match edata {
805 None => None,
806 Some(vec) => Some(Arc::new(vec)),
807 };
808 NACodecInfo { name, properties: p, extradata }
809 }
810 /// Constructs a new reference-counted instance of `NACodecInfo`.
811 pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
812 NACodecInfo { name, properties: p, extradata: edata }
813 }
814 /// Converts current instance into a reference-counted one.
815 pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
816 /// Returns codec information.
817 pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
818 /// Returns additional initialisation data required by the codec.
819 pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
820 if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
821 None
822 }
823 /// Returns codec name.
824 pub fn get_name(&self) -> &'static str { self.name }
825 /// Reports whether it is a video codec.
826 pub fn is_video(&self) -> bool {
827 if let NACodecTypeInfo::Video(_) = self.properties { return true; }
828 false
829 }
830 /// Reports whether it is an audio codec.
831 pub fn is_audio(&self) -> bool {
832 if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
833 false
834 }
835 /// Constructs a new empty reference-counted instance of `NACodecInfo`.
836 pub fn new_dummy() -> Arc<Self> {
837 Arc::new(DUMMY_CODEC_INFO)
838 }
839 /// Updates codec infomation.
840 pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
841 Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
842 }
843 }
844
impl Default for NACodecInfo {
    /// Returns the empty placeholder codec information (`DUMMY_CODEC_INFO`).
    fn default() -> Self { DUMMY_CODEC_INFO }
}
848
849 impl fmt::Display for NACodecInfo {
850 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
851 let edata = match self.extradata.clone() {
852 None => "no extradata".to_string(),
853 Some(v) => format!("{} byte(s) of extradata", v.len()),
854 };
855 write!(f, "{}: {} {}", self.name, self.properties, edata)
856 }
857 }
858
/// Default empty codec information.
///
/// Used as the placeholder value by `NACodecInfo::new_dummy()` and `Default`.
pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
                            name: "none",
                            properties: NACodecTypeInfo::None,
                            extradata: None };
864
/// A list of recognized frame types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    /// Intra frame type.
    I,
    /// Inter frame type.
    P,
    /// Bidirectionally predicted frame.
    B,
    /// Skip frame.
    ///
    /// When such frame is encountered then last frame should be used again if it is needed.
    Skip,
    /// Some other frame type.
    Other,
}
882
883 impl fmt::Display for FrameType {
884 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
885 match *self {
886 FrameType::I => write!(f, "I"),
887 FrameType::P => write!(f, "P"),
888 FrameType::B => write!(f, "B"),
889 FrameType::Skip => write!(f, "skip"),
890 FrameType::Other => write!(f, "x"),
891 }
892 }
893 }
894
/// Timestamp information.
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    /// Presentation timestamp.
    pub pts: Option<u64>,
    /// Decode timestamp.
    pub dts: Option<u64>,
    /// Duration (in timebase units).
    pub duration: Option<u64>,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}

impl NATimeInfo {
    /// Constructs a new `NATimeInfo` instance.
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    /// Returns presentation timestamp.
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    /// Returns decoding timestamp.
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    /// Returns duration.
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    /// Sets new presentation timestamp.
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    /// Sets new decoding timestamp.
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    /// Sets new duration.
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }

    /// Converts time in given scale into timestamp in given base.
    ///
    /// Computes `time * tb_num / base / tb_den`, falling back to less precise
    /// evaluation orders when the full-precision product would overflow.
    pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        if let Some(tmp) = time.checked_mul(tb_num) {
            tmp / base / tb_den
        } else {
            // `time * tb_num` overflows, so divide by `base` first.
            // (The previous code retried the very same multiplication here,
            // which could never succeed — that dead branch is removed.)
            let coarse = time / base;
            if let Some(tmp) = coarse.checked_mul(tb_num) {
                tmp / tb_den
            } else {
                // Last resort: divide before multiplying, losing precision.
                (coarse / tb_den) * tb_num
            }
        }
    }
    /// Converts timestamp in given base into time in given scale.
    ///
    /// Computes `ts * base * tb_num / tb_den`, reordering the operations when
    /// an intermediate product would overflow.
    pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = tb_num as u64;
        let tb_den = tb_den as u64;
        if let Some(tmp) = ts.checked_mul(base) {
            if let Some(tmp2) = tmp.checked_mul(tb_num) {
                tmp2 / tb_den
            } else {
                (tmp / tb_den) * tb_num
            }
        } else if let Some(tmp) = ts.checked_mul(tb_num) {
            (tmp / tb_den) * base
        } else {
            (ts / tb_den) * base * tb_num
        }
    }
}
972
/// Decoded frame information.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    /// Frame timestamp.
    pub ts: NATimeInfo,
    /// Frame ID.
    pub id: i64,
    // Decoded frame data; accessed through get_buffer().
    buffer: NABufferType,
    // Codec information for the stream this frame was decoded from.
    info: NACodecInfoRef,
    /// Frame type.
    pub frame_type: FrameType,
    /// Keyframe flag.
    pub key: bool,
//            options: HashMap<String, NAValue>,
}
989
/// A specialised type for reference-counted `NAFrame`.
///
/// Cloning this only bumps the reference count, no frame data is copied.
pub type NAFrameRef = Arc<NAFrame>;
992
993 fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
994 let chromaton = info.get_format().get_chromaton(idx);
995 if chromaton.is_none() { return (0, 0); }
996 let (hs, vs) = chromaton.unwrap().get_subsampling();
997 let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
998 let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
999 (w, h)
1000 }
1001
1002 impl NAFrame {
1003 /// Constructs a new `NAFrame` instance.
1004 pub fn new(ts: NATimeInfo,
1005 ftype: FrameType,
1006 keyframe: bool,
1007 info: NACodecInfoRef,
1008 /*options: HashMap<String, NAValue>,*/
1009 buffer: NABufferType) -> Self {
1010 NAFrame { ts, id: 0, buffer, info, frame_type: ftype, key: keyframe/*, options*/ }
1011 }
1012 /// Returns frame format information.
1013 pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
1014 /// Returns frame type.
1015 pub fn get_frame_type(&self) -> FrameType { self.frame_type }
1016 /// Reports whether the frame is a keyframe.
1017 pub fn is_keyframe(&self) -> bool { self.key }
1018 /// Sets new frame type.
1019 pub fn set_frame_type(&mut self, ftype: FrameType) { self.frame_type = ftype; }
1020 /// Sets keyframe flag.
1021 pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
1022 /// Returns frame timestamp.
1023 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
1024 /// Returns frame presentation time.
1025 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
1026 /// Returns frame decoding time.
1027 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
1028 /// Returns picture ID.
1029 pub fn get_id(&self) -> i64 { self.id }
1030 /// Returns frame display duration.
1031 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1032 /// Sets new presentation timestamp.
1033 pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
1034 /// Sets new decoding timestamp.
1035 pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
1036 /// Sets new picture ID.
1037 pub fn set_id(&mut self, id: i64) { self.id = id; }
1038 /// Sets new duration.
1039 pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }
1040
1041 /// Returns a reference to the frame data.
1042 pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }
1043
1044 /// Converts current instance into a reference-counted one.
1045 pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
1046
1047 /// Creates new frame with metadata from `NAPacket`.
1048 pub fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
1049 NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, /*HashMap::new(),*/ buf)
1050 }
1051 }
1052
1053 impl fmt::Display for NAFrame {
1054 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1055 let mut ostr = format!("frame type {}", self.frame_type);
1056 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1057 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1058 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1059 if self.key { ostr = format!("{} kf", ostr); }
1060 write!(f, "[{}]", ostr)
1061 }
1062 }
1063
/// A list of possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// Video stream.
    Video,
    /// Audio stream.
    Audio,
    /// Subtitles.
    Subtitles,
    /// Any data stream (or might be an unrecognized audio/video stream).
    Data,
    /// Nonexistent stream (displayed as "-").
    None,
}
1079
1080 impl fmt::Display for StreamType {
1081 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1082 match *self {
1083 StreamType::Video => write!(f, "Video"),
1084 StreamType::Audio => write!(f, "Audio"),
1085 StreamType::Subtitles => write!(f, "Subtitles"),
1086 StreamType::Data => write!(f, "Data"),
1087 StreamType::None => write!(f, "-"),
1088 }
1089 }
1090 }
1091
/// Stream data.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    // Stream content type (video, audio, etc.).
    media_type: StreamType,
    /// Stream ID.
    pub id: u32,
    // Stream number as assigned by the demuxer (see get_num()/set_num()).
    num: usize,
    // Codec information for this stream.
    info: NACodecInfoRef,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}
1106
/// A specialised reference-counted `NAStream` type.
///
/// Cloning this only bumps the reference count.
pub type NAStreamRef = Arc<NAStream>;
1109
/// Downscales the timebase by its greatest common denominator.
///
/// A zero numerator is returned unchanged; an exactly divisible pair is
/// normalised to `1/x`; otherwise both components are divided by their GCD.
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    // Euclid's algorithm (modulo form): O(log n) iterations instead of the
    // previous subtraction loop, which needed O(max/min) iterations for
    // skewed inputs such as (1, u32::MAX). Both inputs are non-zero here,
    // so the loop terminates with `a` holding the GCD.
    let mut a = tb_num;
    let mut b = tb_den;
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }

    (tb_num / a, tb_den / a)
}
1125
1126 impl NAStream {
1127 /// Constructs a new `NAStream` instance.
1128 pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
1129 let (n, d) = reduce_timebase(tb_num, tb_den);
1130 NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
1131 }
1132 /// Returns stream id.
1133 pub fn get_id(&self) -> u32 { self.id }
1134 /// Returns stream type.
1135 pub fn get_media_type(&self) -> StreamType { self.media_type }
1136 /// Returns stream number assigned by demuxer.
1137 pub fn get_num(&self) -> usize { self.num }
1138 /// Sets stream number.
1139 pub fn set_num(&mut self, num: usize) { self.num = num; }
1140 /// Returns codec information.
1141 pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
1142 /// Returns stream timebase.
1143 pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
1144 /// Sets new stream timebase.
1145 pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
1146 let (n, d) = reduce_timebase(tb_num, tb_den);
1147 self.tb_num = n;
1148 self.tb_den = d;
1149 }
1150 /// Converts current instance into a reference-counted one.
1151 pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
1152 }
1153
1154 impl fmt::Display for NAStream {
1155 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1156 write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
1157 }
1158 }
1159
/// Side data that may accompany demuxed data.
#[derive(Clone)]
pub enum NASideData {
    /// Palette information.
    ///
    /// This side data contains a flag signalling that palette has changed since previous time and a reference to the current palette.
    /// Palette is stored in 8-bit RGBA format (256 entries × 4 bytes = 1024 bytes).
    Palette(bool, Arc<[u8; 1024]>),
    /// Generic user data.
    UserData(Arc<Vec<u8>>),
}
1171
/// Packet with compressed data.
#[allow(dead_code)]
pub struct NAPacket {
    // Stream this packet belongs to; accessed through get_stream()/reassign().
    stream: NAStreamRef,
    /// Packet timestamp.
    pub ts: NATimeInfo,
    // Compressed payload; accessed through get_buffer().
    buffer: NABufferRef<Vec<u8>>,
    /// Keyframe flag.
    pub keyframe: bool,
//    options: HashMap<String, NAValue<'a>>,
    /// Packet side data (e.g. palette for paletted formats).
    pub side_data: Vec<NASideData>,
}
1185
1186 impl NAPacket {
1187 /// Constructs a new `NAPacket` instance.
1188 pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
1189 // let mut vec: Vec<u8> = Vec::new();
1190 // vec.resize(size, 0);
1191 NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec), side_data: Vec::new() }
1192 }
1193 /// Constructs a new `NAPacket` instance reusing a buffer reference.
1194 pub fn new_from_refbuf(str: NAStreamRef, ts: NATimeInfo, kf: bool, buffer: NABufferRef<Vec<u8>>) -> Self {
1195 NAPacket { stream: str, ts, keyframe: kf, buffer, side_data: Vec::new() }
1196 }
1197 /// Returns information about the stream packet belongs to.
1198 pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
1199 /// Returns packet timestamp.
1200 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
1201 /// Returns packet presentation timestamp.
1202 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
1203 /// Returns packet decoding timestamp.
1204 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
1205 /// Returns packet duration.
1206 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1207 /// Reports whether this is a keyframe packet.
1208 pub fn is_keyframe(&self) -> bool { self.keyframe }
1209 /// Returns a reference to packet data.
1210 pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
1211 /// Adds side data for a packet.
1212 pub fn add_side_data(&mut self, side_data: NASideData) { self.side_data.push(side_data); }
1213 /// Assigns packet to a new stream.
1214 pub fn reassign(&mut self, str: NAStreamRef, ts: NATimeInfo) {
1215 self.stream = str;
1216 self.ts = ts;
1217 }
1218 }
1219
impl Drop for NAPacket {
    // NOTE(review): this Drop impl is a no-op and adds nothing over the
    // compiler-generated drop glue — it looks like a leftover placeholder.
    // Candidate for removal; kept here to avoid changing the type's traits.
    fn drop(&mut self) {}
}
1223
1224 impl fmt::Display for NAPacket {
1225 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1226 let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len());
1227 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1228 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1229 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1230 if self.keyframe { ostr = format!("{} kf", ostr); }
1231 ostr += "]";
1232 write!(f, "{}", ostr)
1233 }
1234 }