core/frame: add helper functions for dealing with reference-counted buffer
[nihav.git] / nihav-core / src / frame.rs
1 //! Packets and decoded frames functionality.
2 use std::cmp::max;
3 //use std::collections::HashMap;
4 use std::fmt;
5 pub use std::sync::Arc;
6 pub use crate::formats::*;
7 pub use crate::refs::*;
8
/// Audio stream information.
///
/// Carries the immutable per-stream parameters needed to interpret decoded audio:
/// rate, channel count, sample format and block size.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    /// Sample rate.
    pub sample_rate: u32,
    /// Number of channels.
    pub channels: u8,
    /// Audio sample format.
    pub format: NASoniton,
    /// Length of one audio block in samples.
    pub block_len: usize,
}
22
23 impl NAAudioInfo {
24 /// Constructs a new `NAAudioInfo` instance.
25 pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self {
26 NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl }
27 }
28 /// Returns audio sample rate.
29 pub fn get_sample_rate(&self) -> u32 { self.sample_rate }
30 /// Returns the number of channels.
31 pub fn get_channels(&self) -> u8 { self.channels }
32 /// Returns sample format.
33 pub fn get_format(&self) -> NASoniton { self.format }
34 /// Returns one audio block duration in samples.
35 pub fn get_block_len(&self) -> usize { self.block_len }
36 }
37
38 impl fmt::Display for NAAudioInfo {
39 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
40 write!(f, "{} Hz, {} ch", self.sample_rate, self.channels)
41 }
42 }
43
/// Video stream information.
///
/// Describes picture dimensions, orientation and pixel format for a video stream.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    /// Picture width.
    pub width: usize,
    /// Picture height.
    pub height: usize,
    /// Picture is stored downside up.
    pub flipped: bool,
    /// Picture pixel format.
    pub format: NAPixelFormaton,
}
57
58 impl NAVideoInfo {
59 /// Constructs a new `NAVideoInfo` instance.
60 pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self {
61 NAVideoInfo { width: w, height: h, flipped: flip, format: fmt }
62 }
63 /// Returns picture width.
64 pub fn get_width(&self) -> usize { self.width as usize }
65 /// Returns picture height.
66 pub fn get_height(&self) -> usize { self.height as usize }
67 /// Returns picture orientation.
68 pub fn is_flipped(&self) -> bool { self.flipped }
69 /// Returns picture pixel format.
70 pub fn get_format(&self) -> NAPixelFormaton { self.format }
71 /// Sets new picture width.
72 pub fn set_width(&mut self, w: usize) { self.width = w; }
73 /// Sets new picture height.
74 pub fn set_height(&mut self, h: usize) { self.height = h; }
75 }
76
77 impl fmt::Display for NAVideoInfo {
78 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
79 write!(f, "{}x{}", self.width, self.height)
80 }
81 }
82
/// A list of possible stream information types.
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    /// No codec present.
    None,
    /// Audio codec information.
    Audio(NAAudioInfo),
    /// Video codec information.
    Video(NAVideoInfo),
}
93
94 impl NACodecTypeInfo {
95 /// Returns video stream information.
96 pub fn get_video_info(&self) -> Option<NAVideoInfo> {
97 match *self {
98 NACodecTypeInfo::Video(vinfo) => Some(vinfo),
99 _ => None,
100 }
101 }
102 /// Returns audio stream information.
103 pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
104 match *self {
105 NACodecTypeInfo::Audio(ainfo) => Some(ainfo),
106 _ => None,
107 }
108 }
109 /// Reports whether the current stream is video stream.
110 pub fn is_video(&self) -> bool {
111 match *self {
112 NACodecTypeInfo::Video(_) => true,
113 _ => false,
114 }
115 }
116 /// Reports whether the current stream is audio stream.
117 pub fn is_audio(&self) -> bool {
118 match *self {
119 NACodecTypeInfo::Audio(_) => true,
120 _ => false,
121 }
122 }
123 }
124
125 impl fmt::Display for NACodecTypeInfo {
126 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
127 let ret = match *self {
128 NACodecTypeInfo::None => "".to_string(),
129 NACodecTypeInfo::Audio(fmt) => format!("{}", fmt),
130 NACodecTypeInfo::Video(fmt) => format!("{}", fmt),
131 };
132 write!(f, "{}", ret)
133 }
134 }
135
/// Decoded video frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of image with several components those components are stored sequentially and can be accessed in the buffer starting at corresponding component offset.
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    // picture parameters (dimensions, format, orientation)
    info: NAVideoInfo,
    // reference-counted pixel data shared between clones
    data: NABufferRef<Vec<T>>,
    // start offset of each component inside `data`
    offs: Vec<usize>,
    // line stride of each component, in elements
    strides: Vec<usize>,
}
147
impl<T: Clone> NAVideoBuffer<T> {
    /// Returns the component offset (0 for all unavailable offsets).
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    /// Returns picture info.
    pub fn get_info(&self) -> NAVideoInfo { self.info }
    /// Returns an immutable reference to the data.
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    /// Returns a mutable reference to the data.
    ///
    /// Returns `None` when the underlying buffer is shared (more than one reference).
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    /// Returns the number of components in picture format.
    pub fn get_num_components(&self) -> usize { self.offs.len() }
    /// Creates a copy of current `NAVideoBuffer`.
    ///
    /// The pixel data is deep-copied into a fresh reference-counted buffer,
    /// so the copy can be mutated independently of the original.
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len());
        strides.clone_from(&self.strides);
        NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides }
    }
    /// Returns stride (distance between subsequent lines) for the requested component.
    pub fn get_stride(&self, idx: usize) -> usize {
        if idx >= self.strides.len() { return 0; }
        self.strides[idx]
    }
    /// Returns requested component dimensions.
    pub fn get_dimensions(&self, idx: usize) -> (usize, usize) {
        get_plane_size(&self.info, idx)
    }
    /// Converts current instance into buffer reference.
    pub fn into_ref(self) -> NABufferRef<Self> {
        NABufferRef::new(self)
    }

    // dumps buffer layout (size, format, offsets, strides) to stdout for debugging
    fn print_contents(&self, datatype: &str) {
        println!("{} video buffer size {}", datatype, self.data.len());
        println!(" format {}", self.info);
        print!(" offsets:");
        for off in self.offs.iter() {
            print!(" {}", *off);
        }
        println!();
        print!(" strides:");
        for stride in self.strides.iter() {
            print!(" {}", *stride);
        }
        println!();
    }
}
201
/// A specialised type for reference-counted `NAVideoBuffer`.
pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;
204
/// Decoded audio frame.
///
/// NihAV frames are stored in native type (8/16/32-bit elements) inside a single buffer.
/// In case of planar audio samples for each channel are stored sequentially and can be accessed in the buffer starting at corresponding channel offset.
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    // audio stream parameters
    info: NAAudioInfo,
    // reference-counted sample data shared between clones
    data: NABufferRef<Vec<T>>,
    // start offset of each channel inside `data`
    offs: Vec<usize>,
    // distance between the starts of two channels (planar layout)
    stride: usize,
    // distance between two samples of one channel (interleaved layout)
    step: usize,
    // channel layout description
    chmap: NAChannelMap,
    // frame length in samples
    len: usize,
}
219
impl<T: Clone> NAAudioBuffer<T> {
    /// Returns the start position of requested channel data.
    ///
    /// Returns 0 for out-of-range channel indices.
    pub fn get_offset(&self, idx: usize) -> usize {
        if idx >= self.offs.len() { 0 }
        else { self.offs[idx] }
    }
    /// Returns the distance between the start of one channel and the next one.
    pub fn get_stride(&self) -> usize { self.stride }
    /// Returns the distance between the samples in one channel.
    pub fn get_step(&self) -> usize { self.step }
    /// Returns audio format information.
    pub fn get_info(&self) -> NAAudioInfo { self.info }
    /// Returns channel map.
    pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap }
    /// Returns an immutable reference to the data.
    pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() }
    /// Returns reference to the data.
    pub fn get_data_ref(&self) -> NABufferRef<Vec<T>> { self.data.clone() }
    /// Returns a mutable reference to the data.
    ///
    /// Returns `None` when the underlying buffer is shared (more than one reference).
    pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() }
    /// Clones current `NAAudioBuffer` into a new one.
    ///
    /// Sample data is deep-copied so the clone can be mutated independently.
    pub fn copy_buffer(&mut self) -> Self {
        let mut data: Vec<T> = Vec::with_capacity(self.data.len());
        data.clone_from(self.data.as_ref());
        let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len());
        offs.clone_from(&self.offs);
        NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride, step: self.step }
    }
    /// Return the length of frame in samples.
    pub fn get_length(&self) -> usize { self.len }

    // dumps buffer layout (stride, step, format, channel map, offsets) to stdout for debugging
    fn print_contents(&self, datatype: &str) {
        println!("Audio buffer with {} data, stride {}, step {}", datatype, self.stride, self.step);
        println!(" format {}", self.info);
        println!(" channel map {}", self.chmap);
        print!(" offsets:");
        for off in self.offs.iter() {
            print!(" {}", *off);
        }
        println!();
    }
}
262
263 impl NAAudioBuffer<u8> {
264 /// Constructs a new `NAAudioBuffer` instance.
265 pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self {
266 let len = data.len();
267 NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0, step: 0 }
268 }
269 }
270
/// A list of possible decoded frame types.
#[derive(Clone)]
pub enum NABufferType {
    /// 8-bit video buffer.
    Video      (NAVideoBufferRef<u8>),
    /// 16-bit video buffer (i.e. every component or packed pixel fits into 16 bits).
    Video16    (NAVideoBufferRef<u16>),
    /// 32-bit video buffer (i.e. every component or packed pixel fits into 32 bits).
    Video32    (NAVideoBufferRef<u32>),
    /// Packed video buffer.
    VideoPacked(NAVideoBufferRef<u8>),
    /// Audio buffer with 8-bit unsigned integer audio.
    AudioU8    (NAAudioBuffer<u8>),
    /// Audio buffer with 16-bit signed integer audio.
    AudioI16   (NAAudioBuffer<i16>),
    /// Audio buffer with 32-bit signed integer audio.
    AudioI32   (NAAudioBuffer<i32>),
    /// Audio buffer with 32-bit floating point audio.
    AudioF32   (NAAudioBuffer<f32>),
    /// Packed audio buffer.
    AudioPacked(NAAudioBuffer<u8>),
    /// Buffer with generic data (e.g. subtitles).
    Data       (NABufferRef<Vec<u8>>),
    /// No data present.
    None,
}
297
impl NABufferType {
    /// Returns the offset to the requested component or channel.
    ///
    /// Returns 0 for `Data`/`None` variants and for out-of-range indices.
    pub fn get_offset(&self, idx: usize) -> usize {
        match *self {
            NABufferType::Video(ref vb)       => vb.get_offset(idx),
            NABufferType::Video16(ref vb)     => vb.get_offset(idx),
            NABufferType::Video32(ref vb)     => vb.get_offset(idx),
            NABufferType::VideoPacked(ref vb) => vb.get_offset(idx),
            NABufferType::AudioU8(ref ab)     => ab.get_offset(idx),
            NABufferType::AudioI16(ref ab)    => ab.get_offset(idx),
            NABufferType::AudioI32(ref ab)    => ab.get_offset(idx),
            NABufferType::AudioF32(ref ab)    => ab.get_offset(idx),
            NABufferType::AudioPacked(ref ab) => ab.get_offset(idx),
            _ => 0,
        }
    }
    /// Returns information for video frames, or `None` for non-video buffers.
    pub fn get_video_info(&self) -> Option<NAVideoInfo> {
        match *self {
            NABufferType::Video(ref vb)       => Some(vb.get_info()),
            NABufferType::Video16(ref vb)     => Some(vb.get_info()),
            NABufferType::Video32(ref vb)     => Some(vb.get_info()),
            NABufferType::VideoPacked(ref vb) => Some(vb.get_info()),
            _ => None,
        }
    }
    /// Returns reference to 8-bit (or packed) video buffer.
    pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> {
        match *self {
            NABufferType::Video(ref vb)       => Some(vb.clone()),
            NABufferType::VideoPacked(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    /// Returns reference to 16-bit video buffer.
    pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> {
        match *self {
            NABufferType::Video16(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    /// Returns reference to 32-bit video buffer.
    pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> {
        match *self {
            NABufferType::Video32(ref vb) => Some(vb.clone()),
            _ => None,
        }
    }
    /// Returns information for audio frames, or `None` for non-audio buffers.
    pub fn get_audio_info(&self) -> Option<NAAudioInfo> {
        match *self {
            NABufferType::AudioU8(ref ab)     => Some(ab.get_info()),
            NABufferType::AudioI16(ref ab)    => Some(ab.get_info()),
            NABufferType::AudioI32(ref ab)    => Some(ab.get_info()),
            NABufferType::AudioF32(ref ab)    => Some(ab.get_info()),
            NABufferType::AudioPacked(ref ab) => Some(ab.get_info()),
            _ => None,
        }
    }
    /// Returns audio channel map, or `None` for non-audio buffers.
    pub fn get_chmap(&self) -> Option<&NAChannelMap> {
        match *self {
            NABufferType::AudioU8(ref ab)     => Some(ab.get_chmap()),
            NABufferType::AudioI16(ref ab)    => Some(ab.get_chmap()),
            NABufferType::AudioI32(ref ab)    => Some(ab.get_chmap()),
            NABufferType::AudioF32(ref ab)    => Some(ab.get_chmap()),
            NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()),
            _ => None,
        }
    }
    /// Returns audio frame duration in samples (0 for non-audio buffers).
    pub fn get_audio_length(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab)     => ab.get_length(),
            NABufferType::AudioI16(ref ab)    => ab.get_length(),
            NABufferType::AudioI32(ref ab)    => ab.get_length(),
            NABufferType::AudioF32(ref ab)    => ab.get_length(),
            NABufferType::AudioPacked(ref ab) => ab.get_length(),
            _ => 0,
        }
    }
    /// Returns the distance between starts of two channels (0 for non-audio buffers).
    pub fn get_audio_stride(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab)     => ab.get_stride(),
            NABufferType::AudioI16(ref ab)    => ab.get_stride(),
            NABufferType::AudioI32(ref ab)    => ab.get_stride(),
            NABufferType::AudioF32(ref ab)    => ab.get_stride(),
            NABufferType::AudioPacked(ref ab) => ab.get_stride(),
            _ => 0,
        }
    }
    /// Returns the distance between two samples in one channel (0 for non-audio buffers).
    pub fn get_audio_step(&self) -> usize {
        match *self {
            NABufferType::AudioU8(ref ab)     => ab.get_step(),
            NABufferType::AudioI16(ref ab)    => ab.get_step(),
            NABufferType::AudioI32(ref ab)    => ab.get_step(),
            NABufferType::AudioF32(ref ab)    => ab.get_step(),
            NABufferType::AudioPacked(ref ab) => ab.get_step(),
            _ => 0,
        }
    }
    /// Returns reference to 8-bit (or packed) audio buffer.
    pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> {
        match *self {
            NABufferType::AudioU8(ref ab)     => Some(ab.clone()),
            NABufferType::AudioPacked(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Returns reference to 16-bit audio buffer.
    pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> {
        match *self {
            NABufferType::AudioI16(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Returns reference to 32-bit integer audio buffer.
    pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> {
        match *self {
            NABufferType::AudioI32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Returns reference to 32-bit floating point audio buffer.
    pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> {
        match *self {
            NABufferType::AudioF32(ref ab) => Some(ab.clone()),
            _ => None,
        }
    }
    /// Prints internal buffer layout.
    pub fn print_buffer_metadata(&self) {
        match *self {
            NABufferType::Video(ref buf)       => buf.print_contents("8-bit"),
            NABufferType::Video16(ref buf)     => buf.print_contents("16-bit"),
            NABufferType::Video32(ref buf)     => buf.print_contents("32-bit"),
            NABufferType::VideoPacked(ref buf) => buf.print_contents("packed"),
            NABufferType::AudioU8(ref buf)     => buf.print_contents("8-bit unsigned integer"),
            NABufferType::AudioI16(ref buf)    => buf.print_contents("16-bit integer"),
            NABufferType::AudioI32(ref buf)    => buf.print_contents("32-bit integer"),
            NABufferType::AudioF32(ref buf)    => buf.print_contents("32-bit float"),
            NABufferType::AudioPacked(ref buf) => buf.print_contents("packed"),
            NABufferType::Data(ref buf)        => { println!("Data buffer, len = {}", buf.len()); },
            NABufferType::None                 => { println!("No buffer"); },
        };
    }
}
447
// upper bound on the number of picture components a simple frame view can describe
const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
/// Simplified decoded frame data.
pub struct NASimpleVideoFrame<'a, T: Copy> {
    /// Widths of each picture component.
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Heights of each picture component.
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Orientation (upside-down or downside-up) flag.
    pub flip: bool,
    /// Strides for each component.
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Start of each component.
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Number of components.
    pub components: usize,
    /// Pointer to the picture pixel data.
    pub data: &'a mut [T],
}
466
467 impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
468 /// Constructs a new instance of `NASimpleVideoFrame` from `NAVideoBuffer`.
469 pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
470 let vinfo = vbuf.get_info();
471 let components = vinfo.format.components as usize;
472 if components > NA_SIMPLE_VFRAME_COMPONENTS {
473 return None;
474 }
475 let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
476 let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
477 let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
478 let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
479 for comp in 0..components {
480 let (width, height) = vbuf.get_dimensions(comp);
481 w[comp] = width;
482 h[comp] = height;
483 s[comp] = vbuf.get_stride(comp);
484 o[comp] = vbuf.get_offset(comp);
485 }
486 let flip = vinfo.flipped;
487 Some(NASimpleVideoFrame {
488 width: w,
489 height: h,
490 flip,
491 stride: s,
492 offset: o,
493 components,
494 data: vbuf.data.as_mut_slice(),
495 })
496 }
497 }
498
/// A list of possible frame allocator errors.
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    /// Requested picture dimensions are too large.
    TooLargeDimensions,
    /// Invalid input format.
    FormatError,
}
507
/// Constructs a new video buffer with requested format.
///
/// `align` is power of two alignment for image. E.g. the value of 5 means that frame dimensions will be padded to be multiple of 32.
///
/// Depending on the pixel format this returns one of the `NABufferType` video
/// variants: `Video` for paletted or planar 8-bit data, `Video16`/`Video32`
/// for deeper planar or packed data and `VideoPacked` for byte-aligned packed
/// formats. All size computations use checked arithmetic and report
/// `AllocatorError::TooLargeDimensions` on overflow.
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
    let fmt = &vinfo.format;
    let mut new_size: usize = 0;
    let mut offs: Vec<usize> = Vec::new();
    let mut strides: Vec<usize> = Vec::new();

    // every declared component must have a chromaton description
    for i in 0..fmt.get_num_comp() {
        if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
    }

    // pad both dimensions up to the requested power-of-two alignment
    let align_mod = ((1 << align) as usize) - 1;
    let width = ((vinfo.width as usize) + align_mod) & !align_mod;
    let height = ((vinfo.height as usize) + align_mod) & !align_mod;
    let mut max_depth = 0;
    let mut all_packed = true;
    let mut all_bytealigned = true;
    // classify the format: fully packed? byte-aligned components? maximum bit depth?
    for i in 0..fmt.get_num_comp() {
        let ochr = fmt.get_chromaton(i);
        if ochr.is_none() { continue; }
        let chr = ochr.unwrap();
        if !chr.is_packed() {
            all_packed = false;
        } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
            all_bytealigned = false;
        }
        max_depth = max(max_depth, chr.get_depth());
    }
    // element sizes other than 2 or 4 bytes cannot use the native 16/32-bit path
    let unfit_elem_size = match fmt.get_elem_size() {
        2 | 4 => false,
        _ => true,
    };

    //todo semi-packed like NV12
    if fmt.is_paletted() {
        //todo various-sized palettes?
        let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
        let pic_sz = stride.checked_mul(height);
        if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        // palette (256 entries) is appended right after the picture data
        let pal_size = 256 * (fmt.get_elem_size() as usize);
        let new_size = pic_sz.unwrap().checked_add(pal_size);
        if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
        offs.push(0);
        offs.push(stride * height);
        strides.push(stride);
        let data: Vec<u8> = vec![0; new_size.unwrap()];
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::Video(buf.into_ref()))
    } else if !all_packed {
        // planar path: lay the components out sequentially, accumulating offsets
        for i in 0..fmt.get_num_comp() {
            let ochr = fmt.get_chromaton(i);
            if ochr.is_none() { continue; }
            let chr = ochr.unwrap();
            offs.push(new_size as usize);
            let stride = chr.get_linesize(width);
            let cur_h = chr.get_height(height);
            let cur_sz = stride.checked_mul(cur_h);
            if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            let new_sz = new_size.checked_add(cur_sz.unwrap());
            if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            new_size = new_sz.unwrap();
            strides.push(stride);
        }
        // choose the narrowest element type that can hold the deepest component
        if max_depth <= 8 {
            let data: Vec<u8> = vec![0; new_size];
            let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video(buf.into_ref()))
        } else if max_depth <= 16 {
            let data: Vec<u16> = vec![0; new_size];
            let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video16(buf.into_ref()))
        } else {
            let data: Vec<u32> = vec![0; new_size];
            let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video32(buf.into_ref()))
        }
    } else if all_bytealigned || unfit_elem_size {
        // packed path stored as raw bytes
        let elem_sz = fmt.get_elem_size();
        let line_sz = width.checked_mul(elem_sz as usize);
        if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let new_sz = line_sz.unwrap().checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        let data: Vec<u8> = vec![0; new_size];
        strides.push(line_sz.unwrap());
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::VideoPacked(buf.into_ref()))
    } else {
        // packed path with native 16/32-bit elements
        let elem_sz = fmt.get_elem_size();
        let new_sz = width.checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        match elem_sz {
            2 => {
                let data: Vec<u16> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video16(buf.into_ref()))
            },
            4 => {
                let data: Vec<u32> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video32(buf.into_ref()))
            },
            // elem_sz was validated by unfit_elem_size above, only 2 and 4 reach here
            _ => unreachable!(),
        }
    }
}
619
/// Constructs a new audio buffer for the requested format and length.
///
/// Planar and byte-aligned formats get a typed buffer (`AudioU8`/`AudioI16`/
/// `AudioF32`) with per-channel offsets; everything else is allocated as a
/// raw `AudioPacked` byte buffer. Unsupported bit-depth/signedness
/// combinations are reported as an error.
#[allow(clippy::collapsible_if)]
pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
    let mut offs: Vec<usize> = Vec::new();
    if ainfo.format.is_planar() || ((ainfo.format.get_bits() % 8) == 0) {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = len.unwrap();
        let stride;
        let step;
        if ainfo.format.is_planar() {
            // planar: channels follow each other, one sample after another inside a channel
            stride = nsamples;
            step = 1;
            for i in 0..ainfo.channels {
                offs.push((i as usize) * stride);
            }
        } else {
            // interleaved: samples of all channels alternate
            stride = 1;
            step = ainfo.channels as usize;
            for i in 0..ainfo.channels {
                offs.push(i as usize);
            }
        }
        if ainfo.format.is_float() {
            // only 32-bit float is supported
            if ainfo.format.get_bits() == 32 {
                let data: Vec<f32> = vec![0.0; length];
                let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
                Ok(NABufferType::AudioF32(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        } else {
            // integer formats: unsigned 8-bit or signed 16-bit
            if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
                let data: Vec<u8> = vec![0; length];
                let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
                Ok(NABufferType::AudioU8(buf))
            } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
                let data: Vec<i16> = vec![0; length];
                let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride, step };
                Ok(NABufferType::AudioI16(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        }
    } else {
        // non-planar, non-byte-aligned: allocate an opaque packed byte buffer
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = ainfo.format.get_audio_size(len.unwrap() as u64);
        let data: Vec<u8> = vec![0; length];
        let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0, step: 0 };
        Ok(NABufferType::AudioPacked(buf))
    }
}
673
674 /// Constructs a new buffer for generic data.
675 pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> {
676 let data: Vec<u8> = vec![0; size];
677 let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data);
678 Ok(NABufferType::Data(buf))
679 }
680
681 /// Creates a clone of current buffer.
682 pub fn copy_buffer(buf: NABufferType) -> NABufferType {
683 buf.clone()
684 }
685
/// Video frame pool.
///
/// This structure allows codec to effectively reuse old frames instead of allocating and de-allocating frames every time.
/// Caller can also reserve some frames for its own purposes e.g. display queue.
pub struct NAVideoBufferPool<T:Copy> {
    // preallocated frames; a frame with refcount 1 is considered free
    pool: Vec<NAVideoBufferRef<T>>,
    // number of buffers requested at construction time
    max_len: usize,
    // extra buffers reserved for the caller via set_dec_bufs()
    add_len: usize,
}
695
696 impl<T:Copy> NAVideoBufferPool<T> {
697 /// Constructs a new `NAVideoBufferPool` instance.
698 pub fn new(max_len: usize) -> Self {
699 Self {
700 pool: Vec::with_capacity(max_len),
701 max_len,
702 add_len: 0,
703 }
704 }
705 /// Sets the number of buffers reserved for the user.
706 pub fn set_dec_bufs(&mut self, add_len: usize) {
707 self.add_len = add_len;
708 }
709 /// Returns an unused buffer from the pool.
710 pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> {
711 for e in self.pool.iter() {
712 if e.get_num_refs() == 1 {
713 return Some(e.clone());
714 }
715 }
716 None
717 }
718 /// Clones provided frame data into a free pool frame.
719 pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> {
720 let mut dbuf = self.get_free()?;
721 dbuf.data.copy_from_slice(&rbuf.data);
722 Some(dbuf)
723 }
724 /// Clears the pool from all frames.
725 pub fn reset(&mut self) {
726 self.pool.truncate(0);
727 }
728 }
729
730 impl NAVideoBufferPool<u8> {
731 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
732 ///
733 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
734 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
735 let nbufs = self.max_len + self.add_len - self.pool.len();
736 for _ in 0..nbufs {
737 let vbuf = alloc_video_buffer(vinfo, align)?;
738 if let NABufferType::Video(buf) = vbuf {
739 self.pool.push(buf);
740 } else if let NABufferType::VideoPacked(buf) = vbuf {
741 self.pool.push(buf);
742 } else {
743 return Err(AllocatorError::FormatError);
744 }
745 }
746 Ok(())
747 }
748 }
749
750 impl NAVideoBufferPool<u16> {
751 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
752 ///
753 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
754 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
755 let nbufs = self.max_len + self.add_len - self.pool.len();
756 for _ in 0..nbufs {
757 let vbuf = alloc_video_buffer(vinfo, align)?;
758 if let NABufferType::Video16(buf) = vbuf {
759 self.pool.push(buf);
760 } else {
761 return Err(AllocatorError::FormatError);
762 }
763 }
764 Ok(())
765 }
766 }
767
768 impl NAVideoBufferPool<u32> {
769 /// Allocates the target amount of video frames using [`alloc_video_buffer`].
770 ///
771 /// [`alloc_video_buffer`]: ./fn.alloc_video_buffer.html
772 pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> {
773 let nbufs = self.max_len + self.add_len - self.pool.len();
774 for _ in 0..nbufs {
775 let vbuf = alloc_video_buffer(vinfo, align)?;
776 if let NABufferType::Video32(buf) = vbuf {
777 self.pool.push(buf);
778 } else {
779 return Err(AllocatorError::FormatError);
780 }
781 }
782 Ok(())
783 }
784 }
785
/// Information about codec contained in a stream.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    // codec short name
    name: &'static str,
    // audio/video/none stream parameters
    properties: NACodecTypeInfo,
    // optional out-of-band initialisation data, shared via Arc
    extradata: Option<Arc<Vec<u8>>>,
}

/// A specialised type for reference-counted `NACodecInfo`.
pub type NACodecInfoRef = Arc<NACodecInfo>;
797
798 impl NACodecInfo {
799 /// Constructs a new instance of `NACodecInfo`.
800 pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self {
801 let extradata = match edata {
802 None => None,
803 Some(vec) => Some(Arc::new(vec)),
804 };
805 NACodecInfo { name, properties: p, extradata }
806 }
807 /// Constructs a new reference-counted instance of `NACodecInfo`.
808 pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self {
809 NACodecInfo { name, properties: p, extradata: edata }
810 }
811 /// Converts current instance into a reference-counted one.
812 pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) }
813 /// Returns codec information.
814 pub fn get_properties(&self) -> NACodecTypeInfo { self.properties }
815 /// Returns additional initialisation data required by the codec.
816 pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> {
817 if let Some(ref vec) = self.extradata { return Some(vec.clone()); }
818 None
819 }
820 /// Returns codec name.
821 pub fn get_name(&self) -> &'static str { self.name }
822 /// Reports whether it is a video codec.
823 pub fn is_video(&self) -> bool {
824 if let NACodecTypeInfo::Video(_) = self.properties { return true; }
825 false
826 }
827 /// Reports whether it is an audio codec.
828 pub fn is_audio(&self) -> bool {
829 if let NACodecTypeInfo::Audio(_) = self.properties { return true; }
830 false
831 }
832 /// Constructs a new empty reference-counted instance of `NACodecInfo`.
833 pub fn new_dummy() -> Arc<Self> {
834 Arc::new(DUMMY_CODEC_INFO)
835 }
836 /// Updates codec infomation.
837 pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> {
838 Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() })
839 }
840 }
841
impl Default for NACodecInfo {
    // the default codec information is the empty dummy entry
    fn default() -> Self { DUMMY_CODEC_INFO }
}
845
846 impl fmt::Display for NACodecInfo {
847 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
848 let edata = match self.extradata.clone() {
849 None => "no extradata".to_string(),
850 Some(v) => format!("{} byte(s) of extradata", v.len()),
851 };
852 write!(f, "{}: {} {}", self.name, self.properties, edata)
853 }
854 }
855
/// Default empty codec information.
pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };
861
/// A list of recognized frame types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    /// Intra frame type.
    I,
    /// Inter frame type.
    P,
    /// Bidirectionally predicted frame.
    B,
    /// Skip frame.
    ///
    /// When such frame is encountered then last frame should be used again if it is needed.
    Skip,
    /// Some other frame type.
    Other,
}
879
880 impl fmt::Display for FrameType {
881 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
882 match *self {
883 FrameType::I => write!(f, "I"),
884 FrameType::P => write!(f, "P"),
885 FrameType::B => write!(f, "B"),
886 FrameType::Skip => write!(f, "skip"),
887 FrameType::Other => write!(f, "x"),
888 }
889 }
890 }
891
/// Timestamp information.
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    /// Presentation timestamp.
    pub pts: Option<u64>,
    /// Decode timestamp.
    pub dts: Option<u64>,
    /// Duration (in timebase units).
    pub duration: Option<u64>,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}

impl NATimeInfo {
    /// Constructs a new `NATimeInfo` instance.
    pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self {
        NATimeInfo { pts, dts, duration, tb_num, tb_den }
    }
    /// Returns presentation timestamp.
    pub fn get_pts(&self) -> Option<u64> { self.pts }
    /// Returns decoding timestamp.
    pub fn get_dts(&self) -> Option<u64> { self.dts }
    /// Returns duration.
    pub fn get_duration(&self) -> Option<u64> { self.duration }
    /// Sets new presentation timestamp.
    pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; }
    /// Sets new decoding timestamp.
    pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; }
    /// Sets new duration.
    pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }

    /// Converts time in given scale into timestamp in given base.
    ///
    /// Nominally this computes `time * tb_num / base / tb_den`; when the
    /// initial multiplication would overflow it divides by `base` first,
    /// trading some precision for range.
    pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = u64::from(tb_num);
        let tb_den = u64::from(tb_den);
        if let Some(tmp) = time.checked_mul(tb_num) {
            tmp / base / tb_den
        } else {
            // `time * tb_num` overflows, so divide by `base` first.
            // (The original code retried the exact same multiplication here,
            // which could only fail again — that dead branch is removed.)
            let coarse = time / base;
            if let Some(tmp) = coarse.checked_mul(tb_num) {
                tmp / tb_den
            } else {
                (coarse / tb_den) * tb_num
            }
        }
    }
    /// Converts timestamp in given base into time in given scale.
    ///
    /// Nominally `ts * base * tb_num / tb_den`, with division-first fallbacks
    /// on multiplication overflow.
    pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
        let tb_num = u64::from(tb_num);
        let tb_den = u64::from(tb_den);
        if let Some(tmp) = ts.checked_mul(base) {
            if let Some(tmp2) = tmp.checked_mul(tb_num) {
                tmp2 / tb_den
            } else {
                // `ts * base * tb_num` overflows; divide before the second multiplication.
                (tmp / tb_den) * tb_num
            }
        } else if let Some(tmp) = ts.checked_mul(tb_num) {
            // Even `ts * base` overflows; try the other multiplication order.
            (tmp / tb_den) * base
        } else {
            // Both products overflow; divide first, losing some precision.
            (ts / tb_den) * base * tb_num
        }
    }
}
969
/// Decoded frame information.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    /// Frame timestamp.
    pub ts: NATimeInfo,
    /// Frame ID.
    pub id: i64,
    // Decoded frame data; exposed via get_buffer().
    buffer: NABufferType,
    // Codec information for the stream this frame was decoded from.
    info: NACodecInfoRef,
    /// Frame type.
    pub frame_type: FrameType,
    /// Keyframe flag.
    pub key: bool,
//    options: HashMap<String, NAValue>,
}
986
/// A specialised type for reference-counted `NAFrame` (cheap to clone and share).
pub type NAFrameRef = Arc<NAFrame>;
989
990 fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) {
991 let chromaton = info.get_format().get_chromaton(idx);
992 if chromaton.is_none() { return (0, 0); }
993 let (hs, vs) = chromaton.unwrap().get_subsampling();
994 let w = (info.get_width() + ((1 << hs) - 1)) >> hs;
995 let h = (info.get_height() + ((1 << vs) - 1)) >> vs;
996 (w, h)
997 }
998
999 impl NAFrame {
1000 /// Constructs a new `NAFrame` instance.
1001 pub fn new(ts: NATimeInfo,
1002 ftype: FrameType,
1003 keyframe: bool,
1004 info: NACodecInfoRef,
1005 /*options: HashMap<String, NAValue>,*/
1006 buffer: NABufferType) -> Self {
1007 NAFrame { ts, id: 0, buffer, info, frame_type: ftype, key: keyframe/*, options*/ }
1008 }
1009 /// Returns frame format information.
1010 pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
1011 /// Returns frame type.
1012 pub fn get_frame_type(&self) -> FrameType { self.frame_type }
1013 /// Reports whether the frame is a keyframe.
1014 pub fn is_keyframe(&self) -> bool { self.key }
1015 /// Sets new frame type.
1016 pub fn set_frame_type(&mut self, ftype: FrameType) { self.frame_type = ftype; }
1017 /// Sets keyframe flag.
1018 pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
1019 /// Returns frame timestamp.
1020 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
1021 /// Returns frame presentation time.
1022 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
1023 /// Returns frame decoding time.
1024 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
1025 /// Returns picture ID.
1026 pub fn get_id(&self) -> i64 { self.id }
1027 /// Returns frame display duration.
1028 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1029 /// Sets new presentation timestamp.
1030 pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
1031 /// Sets new decoding timestamp.
1032 pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
1033 /// Sets new picture ID.
1034 pub fn set_id(&mut self, id: i64) { self.id = id; }
1035 /// Sets new duration.
1036 pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }
1037
1038 /// Returns a reference to the frame data.
1039 pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }
1040
1041 /// Converts current instance into a reference-counted one.
1042 pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
1043
1044 /// Creates new frame with metadata from `NAPacket`.
1045 pub fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
1046 NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, /*HashMap::new(),*/ buf)
1047 }
1048 }
1049
1050 impl fmt::Display for NAFrame {
1051 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1052 let mut ostr = format!("frame type {}", self.frame_type);
1053 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1054 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1055 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1056 if self.key { ostr = format!("{} kf", ostr); }
1057 write!(f, "[{}]", ostr)
1058 }
1059 }
1060
/// A list of possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// Video stream.
    Video,
    /// Audio stream.
    Audio,
    /// Subtitles.
    Subtitles,
    /// Any data stream (or might be an unrecognized audio/video stream).
    Data,
    /// Nonexistent stream.
    None,
}

impl fmt::Display for StreamType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Map each stream type to its display name.
        let name = match *self {
            StreamType::Video     => "Video",
            StreamType::Audio     => "Audio",
            StreamType::Subtitles => "Subtitles",
            StreamType::Data      => "Data",
            StreamType::None      => "-",
        };
        f.write_str(name)
    }
}
1088
/// Stream data.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    // The type of media carried by this stream.
    media_type: StreamType,
    /// Stream ID.
    pub id: u32,
    // Stream number assigned by the demuxer (see get_num()/set_num()).
    num: usize,
    // Codec parameters for this stream.
    info: NACodecInfoRef,
    /// Timebase numerator.
    pub tb_num: u32,
    /// Timebase denominator.
    pub tb_den: u32,
}
1103
/// A specialised reference-counted `NAStream` type (cheap to clone and share).
pub type NAStreamRef = Arc<NAStream>;
1106
/// Downscales the timebase by its greatest common denominator.
///
/// E.g. `reduce_timebase(1000, 48000)` returns `(1, 48)`.
/// A zero numerator is returned unchanged.
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    // Euclidean GCD: O(log min(num, den)) steps, unlike the subtraction-based
    // variant which needs O(max / min) iterations in the worst case
    // (e.g. tb_num = 1, tb_den = 10^9).
    let mut a = tb_num;
    let mut b = tb_den;
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }

    (tb_num / a, tb_den / a)
}
1122
1123 impl NAStream {
1124 /// Constructs a new `NAStream` instance.
1125 pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self {
1126 let (n, d) = reduce_timebase(tb_num, tb_den);
1127 NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d }
1128 }
1129 /// Returns stream id.
1130 pub fn get_id(&self) -> u32 { self.id }
1131 /// Returns stream type.
1132 pub fn get_media_type(&self) -> StreamType { self.media_type }
1133 /// Returns stream number assigned by demuxer.
1134 pub fn get_num(&self) -> usize { self.num }
1135 /// Sets stream number.
1136 pub fn set_num(&mut self, num: usize) { self.num = num; }
1137 /// Returns codec information.
1138 pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
1139 /// Returns stream timebase.
1140 pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) }
1141 /// Sets new stream timebase.
1142 pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) {
1143 let (n, d) = reduce_timebase(tb_num, tb_den);
1144 self.tb_num = n;
1145 self.tb_den = d;
1146 }
1147 /// Converts current instance into a reference-counted one.
1148 pub fn into_ref(self) -> NAStreamRef { Arc::new(self) }
1149 }
1150
1151 impl fmt::Display for NAStream {
1152 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1153 write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
1154 }
1155 }
1156
/// Side data that may accompany demuxed data.
#[derive(Clone)]
pub enum NASideData {
    /// Palette information.
    ///
    /// This side data contains a flag signalling that palette has changed since previous time and a reference to the current palette.
    /// Palette is stored in 8-bit RGBA format.
    Palette(bool, Arc<[u8; 1024]>),
    /// Generic user data.
    UserData(Arc<Vec<u8>>),
}
1168
/// Packet with compressed data.
#[allow(dead_code)]
pub struct NAPacket {
    // The stream this packet belongs to.
    stream: NAStreamRef,
    /// Packet timestamp.
    pub ts: NATimeInfo,
    // Reference-counted packet payload; exposed via get_buffer().
    buffer: NABufferRef<Vec<u8>>,
    /// Keyframe flag.
    pub keyframe: bool,
//    options: HashMap<String, NAValue<'a>>,
    /// Packet side data (e.g. palette for paletted formats).
    pub side_data: Vec<NASideData>,
}
1182
1183 impl NAPacket {
1184 /// Constructs a new `NAPacket` instance.
1185 pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self {
1186 // let mut vec: Vec<u8> = Vec::new();
1187 // vec.resize(size, 0);
1188 NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec), side_data: Vec::new() }
1189 }
1190 /// Constructs a new `NAPacket` instance reusing a buffer reference.
1191 pub fn new_from_refbuf(str: NAStreamRef, ts: NATimeInfo, kf: bool, buffer: NABufferRef<Vec<u8>>) -> Self {
1192 NAPacket { stream: str, ts, keyframe: kf, buffer, side_data: Vec::new() }
1193 }
1194 /// Returns information about the stream packet belongs to.
1195 pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() }
1196 /// Returns packet timestamp.
1197 pub fn get_time_information(&self) -> NATimeInfo { self.ts }
1198 /// Returns packet presentation timestamp.
1199 pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
1200 /// Returns packet decoding timestamp.
1201 pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
1202 /// Returns packet duration.
1203 pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
1204 /// Reports whether this is a keyframe packet.
1205 pub fn is_keyframe(&self) -> bool { self.keyframe }
1206 /// Returns a reference to packet data.
1207 pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() }
1208 /// Adds side data for a packet.
1209 pub fn add_side_data(&mut self, side_data: NASideData) { self.side_data.push(side_data); }
1210 /// Assigns packet to a new stream.
1211 pub fn reassign(&mut self, str: NAStreamRef, ts: NATimeInfo) {
1212 self.stream = str;
1213 self.ts = ts;
1214 }
1215 }
1216
// NOTE(review): this Drop implementation is intentionally empty. An explicit
// Drop impl does have side effects (it e.g. forbids moving fields out of an
// NAPacket by destructuring), but the actual reason for it is not visible in
// this file — confirm before removing.
impl Drop for NAPacket {
    fn drop(&mut self) {}
}
1220
1221 impl fmt::Display for NAPacket {
1222 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1223 let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len());
1224 if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); }
1225 if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); }
1226 if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); }
1227 if self.keyframe { ostr = format!("{} kf", ostr); }
1228 ostr += "]";
1229 write!(f, "{}", ostr)
1230 }
1231 }