]>
Commit | Line | Data |
---|---|---|
1 | use std::cmp::max; | |
2 | use std::collections::HashMap; | |
3 | use std::fmt; | |
4 | use std::sync::Arc; | |
5 | pub use crate::formats::*; | |
6 | pub use crate::refs::*; | |
7 | ||
/// Audio stream properties.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAAudioInfo {
    /// Sample rate in Hz.
    pub sample_rate: u32,
    /// Number of channels.
    pub channels: u8,
    /// Audio sample format.
    pub format: NASoniton,
    /// Length of a block in samples (codec-dependent unit).
    pub block_len: usize,
}
16 | ||
17 | impl NAAudioInfo { | |
18 | pub fn new(sr: u32, ch: u8, fmt: NASoniton, bl: usize) -> Self { | |
19 | NAAudioInfo { sample_rate: sr, channels: ch, format: fmt, block_len: bl } | |
20 | } | |
21 | pub fn get_sample_rate(&self) -> u32 { self.sample_rate } | |
22 | pub fn get_channels(&self) -> u8 { self.channels } | |
23 | pub fn get_format(&self) -> NASoniton { self.format } | |
24 | pub fn get_block_len(&self) -> usize { self.block_len } | |
25 | } | |
26 | ||
27 | impl fmt::Display for NAAudioInfo { | |
28 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
29 | write!(f, "{} Hz, {} ch", self.sample_rate, self.channels) | |
30 | } | |
31 | } | |
32 | ||
/// Video stream properties.
#[allow(dead_code)]
#[derive(Clone,Copy,PartialEq)]
pub struct NAVideoInfo {
    width: usize,           // picture width in pixels
    height: usize,          // picture height in pixels
    flipped: bool,          // true when the picture is stored bottom-to-top
    format: NAPixelFormaton, // pixel format description
}
41 | ||
42 | impl NAVideoInfo { | |
43 | pub fn new(w: usize, h: usize, flip: bool, fmt: NAPixelFormaton) -> Self { | |
44 | NAVideoInfo { width: w, height: h, flipped: flip, format: fmt } | |
45 | } | |
46 | pub fn get_width(&self) -> usize { self.width as usize } | |
47 | pub fn get_height(&self) -> usize { self.height as usize } | |
48 | pub fn is_flipped(&self) -> bool { self.flipped } | |
49 | pub fn get_format(&self) -> NAPixelFormaton { self.format } | |
50 | pub fn set_width(&mut self, w: usize) { self.width = w; } | |
51 | pub fn set_height(&mut self, h: usize) { self.height = h; } | |
52 | } | |
53 | ||
54 | impl fmt::Display for NAVideoInfo { | |
55 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
56 | write!(f, "{}x{}", self.width, self.height) | |
57 | } | |
58 | } | |
59 | ||
/// Type-specific codec information.
#[derive(Clone,Copy,PartialEq)]
pub enum NACodecTypeInfo {
    /// No codec-specific information.
    None,
    /// Audio stream information.
    Audio(NAAudioInfo),
    /// Video stream information.
    Video(NAVideoInfo),
}
66 | ||
67 | impl NACodecTypeInfo { | |
68 | pub fn get_video_info(&self) -> Option<NAVideoInfo> { | |
69 | match *self { | |
70 | NACodecTypeInfo::Video(vinfo) => Some(vinfo), | |
71 | _ => None, | |
72 | } | |
73 | } | |
74 | pub fn get_audio_info(&self) -> Option<NAAudioInfo> { | |
75 | match *self { | |
76 | NACodecTypeInfo::Audio(ainfo) => Some(ainfo), | |
77 | _ => None, | |
78 | } | |
79 | } | |
80 | pub fn is_video(&self) -> bool { | |
81 | match *self { | |
82 | NACodecTypeInfo::Video(_) => true, | |
83 | _ => false, | |
84 | } | |
85 | } | |
86 | pub fn is_audio(&self) -> bool { | |
87 | match *self { | |
88 | NACodecTypeInfo::Audio(_) => true, | |
89 | _ => false, | |
90 | } | |
91 | } | |
92 | } | |
93 | ||
94 | impl fmt::Display for NACodecTypeInfo { | |
95 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
96 | let ret = match *self { | |
97 | NACodecTypeInfo::None => "".to_string(), | |
98 | NACodecTypeInfo::Audio(fmt) => format!("{}", fmt), | |
99 | NACodecTypeInfo::Video(fmt) => format!("{}", fmt), | |
100 | }; | |
101 | write!(f, "{}", ret) | |
102 | } | |
103 | } | |
104 | ||
/// Decoded video frame buffer: reference-counted pixel data plus
/// per-component offsets and strides into it.
#[derive(Clone)]
pub struct NAVideoBuffer<T> {
    info: NAVideoInfo,          // picture parameters
    data: NABufferRef<Vec<T>>,  // shared pixel data
    offs: Vec<usize>,           // start offset of each component plane
    strides: Vec<usize>,        // line stride of each component plane
}
112 | ||
113 | impl<T: Clone> NAVideoBuffer<T> { | |
114 | pub fn get_offset(&self, idx: usize) -> usize { | |
115 | if idx >= self.offs.len() { 0 } | |
116 | else { self.offs[idx] } | |
117 | } | |
118 | pub fn get_info(&self) -> NAVideoInfo { self.info } | |
119 | pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() } | |
120 | pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() } | |
121 | pub fn get_num_components(&self) -> usize { self.offs.len() } | |
122 | pub fn copy_buffer(&mut self) -> Self { | |
123 | let mut data: Vec<T> = Vec::with_capacity(self.data.len()); | |
124 | data.clone_from(self.data.as_ref()); | |
125 | let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len()); | |
126 | offs.clone_from(&self.offs); | |
127 | let mut strides: Vec<usize> = Vec::with_capacity(self.strides.len()); | |
128 | strides.clone_from(&self.strides); | |
129 | NAVideoBuffer { info: self.info, data: NABufferRef::new(data), offs, strides } | |
130 | } | |
131 | pub fn get_stride(&self, idx: usize) -> usize { | |
132 | if idx >= self.strides.len() { return 0; } | |
133 | self.strides[idx] | |
134 | } | |
135 | pub fn get_dimensions(&self, idx: usize) -> (usize, usize) { | |
136 | get_plane_size(&self.info, idx) | |
137 | } | |
138 | pub fn into_ref(self) -> NABufferRef<Self> { | |
139 | NABufferRef::new(self) | |
140 | } | |
141 | } | |
142 | ||
/// Reference-counted handle to a video buffer.
pub type NAVideoBufferRef<T> = NABufferRef<NAVideoBuffer<T>>;
144 | ||
/// Decoded audio frame buffer.
#[derive(Clone)]
pub struct NAAudioBuffer<T> {
    info: NAAudioInfo,          // audio parameters
    data: NABufferRef<Vec<T>>,  // shared sample data
    offs: Vec<usize>,           // per-channel start offsets (planar layouts)
    stride: usize,              // distance between channels (0 for packed data)
    chmap: NAChannelMap,        // channel layout
    len: usize,                 // length in samples
}
154 | ||
155 | impl<T: Clone> NAAudioBuffer<T> { | |
156 | pub fn get_offset(&self, idx: usize) -> usize { | |
157 | if idx >= self.offs.len() { 0 } | |
158 | else { self.offs[idx] } | |
159 | } | |
160 | pub fn get_stride(&self) -> usize { self.stride } | |
161 | pub fn get_info(&self) -> NAAudioInfo { self.info } | |
162 | pub fn get_chmap(&self) -> &NAChannelMap { &self.chmap } | |
163 | pub fn get_data(&self) -> &Vec<T> { self.data.as_ref() } | |
164 | pub fn get_data_mut(&mut self) -> Option<&mut Vec<T>> { self.data.as_mut() } | |
165 | pub fn copy_buffer(&mut self) -> Self { | |
166 | let mut data: Vec<T> = Vec::with_capacity(self.data.len()); | |
167 | data.clone_from(self.data.as_ref()); | |
168 | let mut offs: Vec<usize> = Vec::with_capacity(self.offs.len()); | |
169 | offs.clone_from(&self.offs); | |
170 | NAAudioBuffer { info: self.info, data: NABufferRef::new(data), offs, chmap: self.get_chmap().clone(), len: self.len, stride: self.stride } | |
171 | } | |
172 | pub fn get_length(&self) -> usize { self.len } | |
173 | } | |
174 | ||
175 | impl NAAudioBuffer<u8> { | |
176 | pub fn new_from_buf(info: NAAudioInfo, data: NABufferRef<Vec<u8>>, chmap: NAChannelMap) -> Self { | |
177 | let len = data.len(); | |
178 | NAAudioBuffer { info, data, chmap, offs: Vec::new(), len, stride: 0 } | |
179 | } | |
180 | } | |
181 | ||
/// A decoded frame buffer of any supported kind.
#[derive(Clone)]
pub enum NABufferType {
    /// 8-bit planar video.
    Video      (NAVideoBufferRef<u8>),
    /// 16-bit-per-element video.
    Video16    (NAVideoBufferRef<u16>),
    /// 32-bit-per-element video.
    Video32    (NAVideoBufferRef<u32>),
    /// Byte-oriented packed video.
    VideoPacked(NAVideoBufferRef<u8>),
    /// Unsigned 8-bit audio.
    AudioU8    (NAAudioBuffer<u8>),
    /// Signed 16-bit audio.
    AudioI16   (NAAudioBuffer<i16>),
    /// Signed 32-bit audio.
    AudioI32   (NAAudioBuffer<i32>),
    /// 32-bit floating-point audio.
    AudioF32   (NAAudioBuffer<f32>),
    /// Packed (byte-oriented) audio.
    AudioPacked(NAAudioBuffer<u8>),
    /// Raw data buffer.
    Data       (NABufferRef<Vec<u8>>),
    /// No buffer.
    None,
}
196 | ||
197 | impl NABufferType { | |
198 | pub fn get_offset(&self, idx: usize) -> usize { | |
199 | match *self { | |
200 | NABufferType::Video(ref vb) => vb.get_offset(idx), | |
201 | NABufferType::Video16(ref vb) => vb.get_offset(idx), | |
202 | NABufferType::Video32(ref vb) => vb.get_offset(idx), | |
203 | NABufferType::VideoPacked(ref vb) => vb.get_offset(idx), | |
204 | NABufferType::AudioU8(ref ab) => ab.get_offset(idx), | |
205 | NABufferType::AudioI16(ref ab) => ab.get_offset(idx), | |
206 | NABufferType::AudioF32(ref ab) => ab.get_offset(idx), | |
207 | NABufferType::AudioPacked(ref ab) => ab.get_offset(idx), | |
208 | _ => 0, | |
209 | } | |
210 | } | |
211 | pub fn get_video_info(&self) -> Option<NAVideoInfo> { | |
212 | match *self { | |
213 | NABufferType::Video(ref vb) => Some(vb.get_info()), | |
214 | NABufferType::Video16(ref vb) => Some(vb.get_info()), | |
215 | NABufferType::Video32(ref vb) => Some(vb.get_info()), | |
216 | NABufferType::VideoPacked(ref vb) => Some(vb.get_info()), | |
217 | _ => None, | |
218 | } | |
219 | } | |
220 | pub fn get_vbuf(&self) -> Option<NAVideoBufferRef<u8>> { | |
221 | match *self { | |
222 | NABufferType::Video(ref vb) => Some(vb.clone()), | |
223 | NABufferType::VideoPacked(ref vb) => Some(vb.clone()), | |
224 | _ => None, | |
225 | } | |
226 | } | |
227 | pub fn get_vbuf16(&self) -> Option<NAVideoBufferRef<u16>> { | |
228 | match *self { | |
229 | NABufferType::Video16(ref vb) => Some(vb.clone()), | |
230 | _ => None, | |
231 | } | |
232 | } | |
233 | pub fn get_vbuf32(&self) -> Option<NAVideoBufferRef<u32>> { | |
234 | match *self { | |
235 | NABufferType::Video32(ref vb) => Some(vb.clone()), | |
236 | _ => None, | |
237 | } | |
238 | } | |
239 | pub fn get_audio_info(&self) -> Option<NAAudioInfo> { | |
240 | match *self { | |
241 | NABufferType::AudioU8(ref ab) => Some(ab.get_info()), | |
242 | NABufferType::AudioI16(ref ab) => Some(ab.get_info()), | |
243 | NABufferType::AudioI32(ref ab) => Some(ab.get_info()), | |
244 | NABufferType::AudioF32(ref ab) => Some(ab.get_info()), | |
245 | NABufferType::AudioPacked(ref ab) => Some(ab.get_info()), | |
246 | _ => None, | |
247 | } | |
248 | } | |
249 | pub fn get_chmap(&self) -> Option<&NAChannelMap> { | |
250 | match *self { | |
251 | NABufferType::AudioU8(ref ab) => Some(ab.get_chmap()), | |
252 | NABufferType::AudioI16(ref ab) => Some(ab.get_chmap()), | |
253 | NABufferType::AudioI32(ref ab) => Some(ab.get_chmap()), | |
254 | NABufferType::AudioF32(ref ab) => Some(ab.get_chmap()), | |
255 | NABufferType::AudioPacked(ref ab) => Some(ab.get_chmap()), | |
256 | _ => None, | |
257 | } | |
258 | } | |
259 | pub fn get_audio_length(&self) -> usize { | |
260 | match *self { | |
261 | NABufferType::AudioU8(ref ab) => ab.get_length(), | |
262 | NABufferType::AudioI16(ref ab) => ab.get_length(), | |
263 | NABufferType::AudioI32(ref ab) => ab.get_length(), | |
264 | NABufferType::AudioF32(ref ab) => ab.get_length(), | |
265 | NABufferType::AudioPacked(ref ab) => ab.get_length(), | |
266 | _ => 0, | |
267 | } | |
268 | } | |
269 | pub fn get_audio_stride(&self) -> usize { | |
270 | match *self { | |
271 | NABufferType::AudioU8(ref ab) => ab.get_stride(), | |
272 | NABufferType::AudioI16(ref ab) => ab.get_stride(), | |
273 | NABufferType::AudioI32(ref ab) => ab.get_stride(), | |
274 | NABufferType::AudioF32(ref ab) => ab.get_stride(), | |
275 | NABufferType::AudioPacked(ref ab) => ab.get_stride(), | |
276 | _ => 0, | |
277 | } | |
278 | } | |
279 | pub fn get_abuf_u8(&self) -> Option<NAAudioBuffer<u8>> { | |
280 | match *self { | |
281 | NABufferType::AudioU8(ref ab) => Some(ab.clone()), | |
282 | NABufferType::AudioPacked(ref ab) => Some(ab.clone()), | |
283 | _ => None, | |
284 | } | |
285 | } | |
286 | pub fn get_abuf_i16(&self) -> Option<NAAudioBuffer<i16>> { | |
287 | match *self { | |
288 | NABufferType::AudioI16(ref ab) => Some(ab.clone()), | |
289 | _ => None, | |
290 | } | |
291 | } | |
292 | pub fn get_abuf_i32(&self) -> Option<NAAudioBuffer<i32>> { | |
293 | match *self { | |
294 | NABufferType::AudioI32(ref ab) => Some(ab.clone()), | |
295 | _ => None, | |
296 | } | |
297 | } | |
298 | pub fn get_abuf_f32(&self) -> Option<NAAudioBuffer<f32>> { | |
299 | match *self { | |
300 | NABufferType::AudioF32(ref ab) => Some(ab.clone()), | |
301 | _ => None, | |
302 | } | |
303 | } | |
304 | } | |
305 | ||
/// Maximum number of component planes a simple video frame can describe.
const NA_SIMPLE_VFRAME_COMPONENTS: usize = 4;
/// A flattened, mutable view of a video buffer for easy per-plane access.
pub struct NASimpleVideoFrame<'a, T: Copy> {
    /// Width of each component plane.
    pub width: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Height of each component plane.
    pub height: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Whether the picture is stored bottom-to-top.
    pub flip: bool,
    /// Line stride of each component plane.
    pub stride: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Start offset of each component plane in `data`.
    pub offset: [usize; NA_SIMPLE_VFRAME_COMPONENTS],
    /// Number of valid components.
    pub components: usize,
    /// The underlying pixel data.
    pub data: &'a mut [T],
}
316 | ||
impl<'a, T:Copy> NASimpleVideoFrame<'a, T> {
    /// Builds a simple frame view over `vbuf`.
    ///
    /// Returns `None` when the format has more components than the
    /// fixed-size arrays can hold.
    pub fn from_video_buf(vbuf: &'a mut NAVideoBuffer<T>) -> Option<Self> {
        let vinfo = vbuf.get_info();
        let components = vinfo.format.components as usize;
        if components > NA_SIMPLE_VFRAME_COMPONENTS {
            return None;
        }
        let mut w: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut h: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut s: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        let mut o: [usize; NA_SIMPLE_VFRAME_COMPONENTS] = [0; NA_SIMPLE_VFRAME_COMPONENTS];
        // gather per-plane geometry from the buffer; unused slots stay zero
        for comp in 0..components {
            let (width, height) = vbuf.get_dimensions(comp);
            w[comp] = width;
            h[comp] = height;
            s[comp] = vbuf.get_stride(comp);
            o[comp] = vbuf.get_offset(comp);
        }
        let flip = vinfo.flipped;
        Some(NASimpleVideoFrame {
            width: w,
            height: h,
            flip,
            stride: s,
            offset: o,
            components,
            data: vbuf.data.as_mut_slice(),
        })
    }
}
347 | ||
/// Errors that buffer allocation can report.
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum AllocatorError {
    /// The requested dimensions overflow size calculations.
    TooLargeDimensions,
    /// The format cannot be allocated (e.g. missing chromaton description).
    FormatError,
}
353 | ||
/// Allocates a zero-filled video buffer for `vinfo`, with width and height
/// rounded up to a multiple of `1 << align`.
///
/// The buffer kind depends on the pixel format: paletted and planar 8-bit
/// formats yield `Video`, deeper planar formats yield `Video16`/`Video32`,
/// byte-aligned packed formats yield `VideoPacked`, and other packed formats
/// with 2- or 4-byte elements yield `Video16`/`Video32`.
///
/// Returns `FormatError` when a component lacks a chromaton description and
/// `TooLargeDimensions` when a size computation overflows.
pub fn alloc_video_buffer(vinfo: NAVideoInfo, align: u8) -> Result<NABufferType, AllocatorError> {
    let fmt = &vinfo.format;
    let mut new_size: usize = 0;
    let mut offs: Vec<usize> = Vec::new();
    let mut strides: Vec<usize> = Vec::new();

    // every declared component must have a chromaton description
    for i in 0..fmt.get_num_comp() {
        if fmt.get_chromaton(i) == None { return Err(AllocatorError::FormatError); }
    }

    // round dimensions up to the requested power-of-two alignment
    let align_mod = ((1 << align) as usize) - 1;
    let width = ((vinfo.width as usize) + align_mod) & !align_mod;
    let height = ((vinfo.height as usize) + align_mod) & !align_mod;
    let mut max_depth = 0;
    let mut all_packed = true;
    let mut all_bytealigned = true;
    // classify the format: fully planar, packed-and-byte-aligned, or neither
    for i in 0..fmt.get_num_comp() {
        let ochr = fmt.get_chromaton(i);
        if ochr.is_none() { continue; }
        let chr = ochr.unwrap();
        if !chr.is_packed() {
            all_packed = false;
        } else if ((chr.get_shift() + chr.get_depth()) & 7) != 0 {
            // packed component does not end on a byte boundary
            all_bytealigned = false;
        }
        max_depth = max(max_depth, chr.get_depth());
    }
    // only 2- and 4-byte elements map onto u16/u32 buffers
    let unfit_elem_size = match fmt.get_elem_size() {
        2 | 4 => false,
        _ => true,
    };

    //todo semi-packed like NV12
    if fmt.is_paletted() {
        //todo various-sized palettes?
        let stride = vinfo.get_format().get_chromaton(0).unwrap().get_linesize(width);
        let pic_sz = stride.checked_mul(height);
        if pic_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        // a 256-entry palette is appended after the picture data
        let pal_size = 256 * (fmt.get_elem_size() as usize);
        let new_size = pic_sz.unwrap().checked_add(pal_size);
        if new_size == None { return Err(AllocatorError::TooLargeDimensions); }
        offs.push(0);              // plane 0: the picture
        offs.push(stride * height); // plane 1: the palette
        strides.push(stride);
        let data: Vec<u8> = vec![0; new_size.unwrap()];
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::Video(buf.into_ref()))
    } else if !all_packed {
        // planar: lay component planes out sequentially, checking for overflow
        for i in 0..fmt.get_num_comp() {
            let ochr = fmt.get_chromaton(i);
            if ochr.is_none() { continue; }
            let chr = ochr.unwrap();
            offs.push(new_size as usize);
            let stride = chr.get_linesize(width);
            let cur_h = chr.get_height(height);
            let cur_sz = stride.checked_mul(cur_h);
            if cur_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            let new_sz = new_size.checked_add(cur_sz.unwrap());
            if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
            new_size = new_sz.unwrap();
            strides.push(stride);
        }
        // pick the narrowest element type that can hold the deepest component
        if max_depth <= 8 {
            let data: Vec<u8> = vec![0; new_size];
            let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video(buf.into_ref()))
        } else if max_depth <= 16 {
            let data: Vec<u16> = vec![0; new_size];
            let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video16(buf.into_ref()))
        } else {
            let data: Vec<u32> = vec![0; new_size];
            let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
            Ok(NABufferType::Video32(buf.into_ref()))
        }
    } else if all_bytealigned || unfit_elem_size {
        // packed byte-oriented data: one plane of width * elem_size bytes per line
        let elem_sz = fmt.get_elem_size();
        let line_sz = width.checked_mul(elem_sz as usize);
        if line_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        let new_sz = line_sz.unwrap().checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        let data: Vec<u8> = vec![0; new_size];
        strides.push(line_sz.unwrap());
        let buf: NAVideoBuffer<u8> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
        Ok(NABufferType::VideoPacked(buf.into_ref()))
    } else {
        // packed data with whole-element pixels: one element per pixel
        let elem_sz = fmt.get_elem_size();
        let new_sz = width.checked_mul(height);
        if new_sz == None { return Err(AllocatorError::TooLargeDimensions); }
        new_size = new_sz.unwrap();
        match elem_sz {
            2 => {
                let data: Vec<u16> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u16> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video16(buf.into_ref()))
            },
            4 => {
                let data: Vec<u32> = vec![0; new_size];
                strides.push(width);
                let buf: NAVideoBuffer<u32> = NAVideoBuffer { data: NABufferRef::new(data), info: vinfo, offs, strides };
                Ok(NABufferType::Video32(buf.into_ref()))
            },
            // elem_sz was validated by unfit_elem_size above
            _ => unreachable!(),
        }
    }
}
462 | ||
/// Allocates a zero-filled audio buffer for `nsamples` samples of `ainfo`.
///
/// Planar formats (and mono byte-sized formats) get one plane per channel;
/// everything else is allocated as packed bytes.
///
/// NOTE(review): unsupported sample formats return `TooLargeDimensions`
/// here rather than `FormatError` — looks like the wrong variant, but
/// callers may rely on it; confirm before changing.
#[allow(clippy::collapsible_if)]
pub fn alloc_audio_buffer(ainfo: NAAudioInfo, nsamples: usize, chmap: NAChannelMap) -> Result<NABufferType, AllocatorError> {
    let mut offs: Vec<usize> = Vec::new();
    if ainfo.format.is_planar() || (ainfo.channels == 1 && (ainfo.format.get_bits() % 8) == 0) {
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = len.unwrap();
        // channel planes are laid out back to back, nsamples apart
        let stride = nsamples;
        for i in 0..ainfo.channels {
            offs.push((i as usize) * stride);
        }
        if ainfo.format.is_float() {
            // only 32-bit floats are supported
            if ainfo.format.get_bits() == 32 {
                let data: Vec<f32> = vec![0.0; length];
                let buf: NAAudioBuffer<f32> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioF32(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        } else {
            // integer formats: unsigned 8-bit or signed 16-bit only
            if ainfo.format.get_bits() == 8 && !ainfo.format.is_signed() {
                let data: Vec<u8> = vec![0; length];
                let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioU8(buf))
            } else if ainfo.format.get_bits() == 16 && ainfo.format.is_signed() {
                let data: Vec<i16> = vec![0; length];
                let buf: NAAudioBuffer<i16> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride };
                Ok(NABufferType::AudioI16(buf))
            } else {
                Err(AllocatorError::TooLargeDimensions)
            }
        }
    } else {
        // packed layout: let the format compute the byte size
        let len = nsamples.checked_mul(ainfo.channels as usize);
        if len == None { return Err(AllocatorError::TooLargeDimensions); }
        let length = ainfo.format.get_audio_size(len.unwrap() as u64);
        let data: Vec<u8> = vec![0; length];
        let buf: NAAudioBuffer<u8> = NAAudioBuffer { data: NABufferRef::new(data), info: ainfo, offs, chmap, len: nsamples, stride: 0 };
        Ok(NABufferType::AudioPacked(buf))
    }
}
504 | ||
505 | pub fn alloc_data_buffer(size: usize) -> Result<NABufferType, AllocatorError> { | |
506 | let data: Vec<u8> = vec![0; size]; | |
507 | let buf: NABufferRef<Vec<u8>> = NABufferRef::new(data); | |
508 | Ok(NABufferType::Data(buf)) | |
509 | } | |
510 | ||
511 | pub fn copy_buffer(buf: NABufferType) -> NABufferType { | |
512 | buf.clone() | |
513 | } | |
514 | ||
/// A pool of reusable video buffers for decoders.
pub struct NAVideoBufferPool<T:Copy> {
    pool: Vec<NAVideoBufferRef<T>>, // allocated buffers
    max_len: usize,                 // base number of buffers to keep
    add_len: usize,                 // extra buffers requested by the decoder
}
520 | ||
521 | impl<T:Copy> NAVideoBufferPool<T> { | |
522 | pub fn new(max_len: usize) -> Self { | |
523 | Self { | |
524 | pool: Vec::with_capacity(max_len), | |
525 | max_len, | |
526 | add_len: 0, | |
527 | } | |
528 | } | |
529 | pub fn set_dec_bufs(&mut self, add_len: usize) { | |
530 | self.add_len = add_len; | |
531 | } | |
532 | pub fn get_free(&mut self) -> Option<NAVideoBufferRef<T>> { | |
533 | for e in self.pool.iter() { | |
534 | if e.get_num_refs() == 1 { | |
535 | return Some(e.clone()); | |
536 | } | |
537 | } | |
538 | None | |
539 | } | |
540 | pub fn get_copy(&mut self, rbuf: &NAVideoBufferRef<T>) -> Option<NAVideoBufferRef<T>> { | |
541 | let mut dbuf = self.get_free()?; | |
542 | dbuf.data.copy_from_slice(&rbuf.data); | |
543 | Some(dbuf) | |
544 | } | |
545 | pub fn reset(&mut self) { | |
546 | self.pool.truncate(0); | |
547 | } | |
548 | } | |
549 | ||
550 | impl NAVideoBufferPool<u8> { | |
551 | pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> { | |
552 | let nbufs = self.max_len + self.add_len - self.pool.len(); | |
553 | for _ in 0..nbufs { | |
554 | let vbuf = alloc_video_buffer(vinfo, align)?; | |
555 | if let NABufferType::Video(buf) = vbuf { | |
556 | self.pool.push(buf); | |
557 | } else if let NABufferType::VideoPacked(buf) = vbuf { | |
558 | self.pool.push(buf); | |
559 | } else { | |
560 | return Err(AllocatorError::FormatError); | |
561 | } | |
562 | } | |
563 | Ok(()) | |
564 | } | |
565 | } | |
566 | ||
567 | impl NAVideoBufferPool<u16> { | |
568 | pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> { | |
569 | let nbufs = self.max_len + self.add_len - self.pool.len(); | |
570 | for _ in 0..nbufs { | |
571 | let vbuf = alloc_video_buffer(vinfo, align)?; | |
572 | if let NABufferType::Video16(buf) = vbuf { | |
573 | self.pool.push(buf); | |
574 | } else { | |
575 | return Err(AllocatorError::FormatError); | |
576 | } | |
577 | } | |
578 | Ok(()) | |
579 | } | |
580 | } | |
581 | ||
582 | impl NAVideoBufferPool<u32> { | |
583 | pub fn prealloc_video(&mut self, vinfo: NAVideoInfo, align: u8) -> Result<(), AllocatorError> { | |
584 | let nbufs = self.max_len + self.add_len - self.pool.len(); | |
585 | for _ in 0..nbufs { | |
586 | let vbuf = alloc_video_buffer(vinfo, align)?; | |
587 | if let NABufferType::Video32(buf) = vbuf { | |
588 | self.pool.push(buf); | |
589 | } else { | |
590 | return Err(AllocatorError::FormatError); | |
591 | } | |
592 | } | |
593 | Ok(()) | |
594 | } | |
595 | } | |
596 | ||
/// Codec identification and type-specific parameters.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NACodecInfo {
    name: &'static str,              // codec name
    properties: NACodecTypeInfo,     // audio/video-specific parameters
    extradata: Option<Arc<Vec<u8>>>, // optional codec-specific extra data
}
604 | ||
/// Reference-counted handle to codec information.
pub type NACodecInfoRef = Arc<NACodecInfo>;
606 | ||
607 | impl NACodecInfo { | |
608 | pub fn new(name: &'static str, p: NACodecTypeInfo, edata: Option<Vec<u8>>) -> Self { | |
609 | let extradata = match edata { | |
610 | None => None, | |
611 | Some(vec) => Some(Arc::new(vec)), | |
612 | }; | |
613 | NACodecInfo { name, properties: p, extradata } | |
614 | } | |
615 | pub fn new_ref(name: &'static str, p: NACodecTypeInfo, edata: Option<Arc<Vec<u8>>>) -> Self { | |
616 | NACodecInfo { name, properties: p, extradata: edata } | |
617 | } | |
618 | pub fn into_ref(self) -> NACodecInfoRef { Arc::new(self) } | |
619 | pub fn get_properties(&self) -> NACodecTypeInfo { self.properties } | |
620 | pub fn get_extradata(&self) -> Option<Arc<Vec<u8>>> { | |
621 | if let Some(ref vec) = self.extradata { return Some(vec.clone()); } | |
622 | None | |
623 | } | |
624 | pub fn get_name(&self) -> &'static str { self.name } | |
625 | pub fn is_video(&self) -> bool { | |
626 | if let NACodecTypeInfo::Video(_) = self.properties { return true; } | |
627 | false | |
628 | } | |
629 | pub fn is_audio(&self) -> bool { | |
630 | if let NACodecTypeInfo::Audio(_) = self.properties { return true; } | |
631 | false | |
632 | } | |
633 | pub fn new_dummy() -> Arc<Self> { | |
634 | Arc::new(DUMMY_CODEC_INFO) | |
635 | } | |
636 | pub fn replace_info(&self, p: NACodecTypeInfo) -> Arc<Self> { | |
637 | Arc::new(NACodecInfo { name: self.name, properties: p, extradata: self.extradata.clone() }) | |
638 | } | |
639 | } | |
640 | ||
impl Default for NACodecInfo {
    /// Returns the dummy (empty) codec information.
    fn default() -> Self { DUMMY_CODEC_INFO }
}
644 | ||
645 | impl fmt::Display for NACodecInfo { | |
646 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
647 | let edata = match self.extradata.clone() { | |
648 | None => "no extradata".to_string(), | |
649 | Some(v) => format!("{} byte(s) of extradata", v.len()), | |
650 | }; | |
651 | write!(f, "{}: {} {}", self.name, self.properties, edata) | |
652 | } | |
653 | } | |
654 | ||
/// Placeholder codec information: no codec, no properties, no extradata.
pub const DUMMY_CODEC_INFO: NACodecInfo = NACodecInfo {
    name: "none",
    properties: NACodecTypeInfo::None,
    extradata: None };
659 | ||
/// A typed option/metadata value.
#[derive(Debug,Clone)]
pub enum NAValue {
    /// No value.
    None,
    /// 32-bit integer.
    Int(i32),
    /// 64-bit integer.
    Long(i64),
    /// String value.
    String(String),
    /// Binary data.
    Data(Arc<Vec<u8>>),
}
668 | ||
/// Coded frame types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum FrameType {
    /// Intra frame.
    I,
    /// Predicted (inter) frame.
    P,
    /// Bidirectionally predicted frame.
    B,
    /// Skipped frame.
    Skip,
    /// Any other frame type.
    Other,
}
678 | ||
679 | impl fmt::Display for FrameType { | |
680 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
681 | match *self { | |
682 | FrameType::I => write!(f, "I"), | |
683 | FrameType::P => write!(f, "P"), | |
684 | FrameType::B => write!(f, "B"), | |
685 | FrameType::Skip => write!(f, "skip"), | |
686 | FrameType::Other => write!(f, "x"), | |
687 | } | |
688 | } | |
689 | } | |
690 | ||
/// Timestamp information for a frame or packet.
#[derive(Debug,Clone,Copy)]
pub struct NATimeInfo {
    pts: Option<u64>,      // presentation timestamp
    dts: Option<u64>,      // decoding timestamp
    duration: Option<u64>, // duration in timebase units
    tb_num: u32,           // timebase numerator
    tb_den: u32,           // timebase denominator
}
699 | ||
700 | impl NATimeInfo { | |
701 | pub fn new(pts: Option<u64>, dts: Option<u64>, duration: Option<u64>, tb_num: u32, tb_den: u32) -> Self { | |
702 | NATimeInfo { pts, dts, duration, tb_num, tb_den } | |
703 | } | |
704 | pub fn get_pts(&self) -> Option<u64> { self.pts } | |
705 | pub fn get_dts(&self) -> Option<u64> { self.dts } | |
706 | pub fn get_duration(&self) -> Option<u64> { self.duration } | |
707 | pub fn set_pts(&mut self, pts: Option<u64>) { self.pts = pts; } | |
708 | pub fn set_dts(&mut self, dts: Option<u64>) { self.dts = dts; } | |
709 | pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; } | |
710 | ||
711 | pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 { | |
712 | let tb_num = tb_num as u64; | |
713 | let tb_den = tb_den as u64; | |
714 | let tmp = time.checked_mul(tb_num); | |
715 | if let Some(tmp) = tmp { | |
716 | tmp / base / tb_den | |
717 | } else { | |
718 | let tmp = time.checked_mul(tb_num); | |
719 | if let Some(tmp) = tmp { | |
720 | tmp / base / tb_den | |
721 | } else { | |
722 | let coarse = time / base; | |
723 | let tmp = coarse.checked_mul(tb_num); | |
724 | if let Some(tmp) = tmp { | |
725 | tmp / tb_den | |
726 | } else { | |
727 | (coarse / tb_den) * tb_num | |
728 | } | |
729 | } | |
730 | } | |
731 | } | |
732 | pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 { | |
733 | let tb_num = tb_num as u64; | |
734 | let tb_den = tb_den as u64; | |
735 | let tmp = ts.checked_mul(base); | |
736 | if let Some(tmp) = tmp { | |
737 | let tmp2 = tmp.checked_mul(tb_num); | |
738 | if let Some(tmp2) = tmp2 { | |
739 | tmp2 / tb_den | |
740 | } else { | |
741 | (tmp / tb_den) * tb_num | |
742 | } | |
743 | } else { | |
744 | let tmp = ts.checked_mul(tb_num); | |
745 | if let Some(tmp) = tmp { | |
746 | (tmp / tb_den) * base | |
747 | } else { | |
748 | (ts / tb_den) * base * tb_num | |
749 | } | |
750 | } | |
751 | } | |
752 | } | |
753 | ||
/// A decoded frame with its timing, codec info and payload buffer.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAFrame {
    ts: NATimeInfo,                   // timestamp information
    id: i64,                          // frame identifier
    buffer: NABufferType,             // decoded data
    info: NACodecInfoRef,             // codec information
    ftype: FrameType,                 // frame type
    key: bool,                        // keyframe flag
    options: HashMap<String, NAValue>, // additional metadata
}
765 | ||
/// Reference-counted handle to a frame.
pub type NAFrameRef = Arc<NAFrame>;
767 | ||
768 | fn get_plane_size(info: &NAVideoInfo, idx: usize) -> (usize, usize) { | |
769 | let chromaton = info.get_format().get_chromaton(idx); | |
770 | if chromaton.is_none() { return (0, 0); } | |
771 | let (hs, vs) = chromaton.unwrap().get_subsampling(); | |
772 | let w = (info.get_width() + ((1 << hs) - 1)) >> hs; | |
773 | let h = (info.get_height() + ((1 << vs) - 1)) >> vs; | |
774 | (w, h) | |
775 | } | |
776 | ||
impl NAFrame {
    /// Constructs a new frame (id defaults to 0).
    pub fn new(ts: NATimeInfo,
               ftype: FrameType,
               keyframe: bool,
               info: NACodecInfoRef,
               options: HashMap<String, NAValue>,
               buffer: NABufferType) -> Self {
        NAFrame { ts, id: 0, buffer, info, ftype, key: keyframe, options }
    }
    /// Returns a handle to the codec information.
    pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() }
    /// Returns the frame type.
    pub fn get_frame_type(&self) -> FrameType { self.ftype }
    /// Reports whether this is a keyframe.
    pub fn is_keyframe(&self) -> bool { self.key }
    /// Sets the frame type.
    pub fn set_frame_type(&mut self, ftype: FrameType) { self.ftype = ftype; }
    /// Sets the keyframe flag.
    pub fn set_keyframe(&mut self, key: bool) { self.key = key; }
    /// Returns the complete timestamp information.
    pub fn get_time_information(&self) -> NATimeInfo { self.ts }
    /// Returns the presentation timestamp.
    pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() }
    /// Returns the decoding timestamp.
    pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() }
    /// Returns the frame identifier.
    pub fn get_id(&self) -> i64 { self.id }
    /// Returns the frame duration.
    pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() }
    /// Sets the presentation timestamp.
    pub fn set_pts(&mut self, pts: Option<u64>) { self.ts.set_pts(pts); }
    /// Sets the decoding timestamp.
    pub fn set_dts(&mut self, dts: Option<u64>) { self.ts.set_dts(dts); }
    /// Sets the frame identifier.
    pub fn set_id(&mut self, id: i64) { self.id = id; }
    /// Sets the frame duration.
    pub fn set_duration(&mut self, dur: Option<u64>) { self.ts.set_duration(dur); }

    /// Returns a (reference-counted) copy of the frame data buffer.
    pub fn get_buffer(&self) -> NABufferType { self.buffer.clone() }

    /// Wraps this frame into a reference-counted handle.
    pub fn into_ref(self) -> NAFrameRef { Arc::new(self) }
}
805 | ||
806 | impl fmt::Display for NAFrame { | |
807 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
808 | let mut ostr = format!("frame type {}", self.ftype); | |
809 | if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); } | |
810 | if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); } | |
811 | if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); } | |
812 | if self.key { ostr = format!("{} kf", ostr); } | |
813 | write!(f, "[{}]", ostr) | |
814 | } | |
815 | } | |
816 | ||
/// Possible stream types.
#[derive(Debug,Clone,Copy,PartialEq)]
#[allow(dead_code)]
pub enum StreamType {
    /// video stream
    Video,
    /// audio stream
    Audio,
    /// subtitles
    Subtitles,
    /// any data stream (or might be an unrecognized audio/video stream)
    Data,
    /// nonexistent stream (displays as `-`)
    None,
}
832 | ||
833 | impl fmt::Display for StreamType { | |
834 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
835 | match *self { | |
836 | StreamType::Video => write!(f, "Video"), | |
837 | StreamType::Audio => write!(f, "Audio"), | |
838 | StreamType::Subtitles => write!(f, "Subtitles"), | |
839 | StreamType::Data => write!(f, "Data"), | |
840 | StreamType::None => write!(f, "-"), | |
841 | } | |
842 | } | |
843 | } | |
844 | ||
/// Description of a single stream: its type, ids, codec parameters and time base.
#[allow(dead_code)]
#[derive(Clone)]
pub struct NAStream {
    media_type: StreamType,     // kind of stream (video/audio/subtitles/...)
    id: u32,                    // stream id
    num: usize,                 // stream number, assigned externally via set_num()
    info: NACodecInfoRef,       // codec parameters for this stream
    tb_num: u32,                // time base numerator (stored reduced — see NAStream::new)
    tb_den: u32,                // time base denominator (stored reduced)
}

/// Reference-counted handle to a stream description.
pub type NAStreamRef = Arc<NAStream>;
857 | ||
/// Reduces the fraction `tb_num / tb_den` to its lowest terms.
///
/// Degenerate inputs are returned unchanged: a zero numerator or a zero
/// denominator cannot be reduced. (The previous subtractive-GCD loop never
/// terminated for `tb_den == 0` with a nonzero numerator, and needed
/// O(max/min) iterations for skewed inputs such as `3/1_000_000_001`.)
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
    if tb_num == 0 || tb_den == 0 { return (tb_num, tb_den); }
    if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }

    // Euclidean GCD in modulo form: O(log min(a, b)) steps.
    let mut a = tb_num;
    let mut b = tb_den;
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }

    (tb_num / a, tb_den / a)
}
872 | ||
873 | impl NAStream { | |
874 | pub fn new(mt: StreamType, id: u32, info: NACodecInfo, tb_num: u32, tb_den: u32) -> Self { | |
875 | let (n, d) = reduce_timebase(tb_num, tb_den); | |
876 | NAStream { media_type: mt, id, num: 0, info: info.into_ref(), tb_num: n, tb_den: d } | |
877 | } | |
878 | pub fn get_id(&self) -> u32 { self.id } | |
879 | pub fn get_media_type(&self) -> StreamType { self.media_type } | |
880 | pub fn get_num(&self) -> usize { self.num } | |
881 | pub fn set_num(&mut self, num: usize) { self.num = num; } | |
882 | pub fn get_info(&self) -> NACodecInfoRef { self.info.clone() } | |
883 | pub fn get_timebase(&self) -> (u32, u32) { (self.tb_num, self.tb_den) } | |
884 | pub fn set_timebase(&mut self, tb_num: u32, tb_den: u32) { | |
885 | let (n, d) = reduce_timebase(tb_num, tb_den); | |
886 | self.tb_num = n; | |
887 | self.tb_den = d; | |
888 | } | |
889 | pub fn into_ref(self) -> NAStreamRef { Arc::new(self) } | |
890 | } | |
891 | ||
impl fmt::Display for NAStream {
    // Prints e.g. `(Video#0 @ 1/25 - <codec properties>)`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "({}#{} @ {}/{} - {})", self.media_type, self.id, self.tb_num, self.tb_den, self.info.get_properties())
    }
}
897 | ||
/// A packet of stream data with its timing information.
#[allow(dead_code)]
pub struct NAPacket {
    stream: NAStreamRef,            // stream this packet belongs to
    ts: NATimeInfo,                 // timing information (pts/dts/duration)
    buffer: NABufferRef<Vec<u8>>,   // reference-counted payload bytes
    keyframe: bool,                 // whether this packet starts a keyframe
    // options: HashMap<String, NAValue<'a>>,
}
906 | ||
907 | impl NAPacket { | |
908 | pub fn new(str: NAStreamRef, ts: NATimeInfo, kf: bool, vec: Vec<u8>) -> Self { | |
909 | // let mut vec: Vec<u8> = Vec::new(); | |
910 | // vec.resize(size, 0); | |
911 | NAPacket { stream: str, ts, keyframe: kf, buffer: NABufferRef::new(vec) } | |
912 | } | |
913 | pub fn get_stream(&self) -> NAStreamRef { self.stream.clone() } | |
914 | pub fn get_time_information(&self) -> NATimeInfo { self.ts } | |
915 | pub fn get_pts(&self) -> Option<u64> { self.ts.get_pts() } | |
916 | pub fn get_dts(&self) -> Option<u64> { self.ts.get_dts() } | |
917 | pub fn get_duration(&self) -> Option<u64> { self.ts.get_duration() } | |
918 | pub fn is_keyframe(&self) -> bool { self.keyframe } | |
919 | pub fn get_buffer(&self) -> NABufferRef<Vec<u8>> { self.buffer.clone() } | |
920 | } | |
921 | ||
impl Drop for NAPacket {
    // NOTE(review): explicit no-op Drop — presumably a placeholder for future
    // packet-teardown logic; confirm before removing, since adding/removing a
    // Drop impl changes move and drop-check semantics for the type.
    fn drop(&mut self) {}
}
925 | ||
926 | impl fmt::Display for NAPacket { | |
927 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
928 | let mut ostr = format!("[pkt for {} size {}", self.stream, self.buffer.len()); | |
929 | if let Some(pts) = self.ts.pts { ostr = format!("{} pts {}", ostr, pts); } | |
930 | if let Some(dts) = self.ts.dts { ostr = format!("{} dts {}", ostr, dts); } | |
931 | if let Some(dur) = self.ts.duration { ostr = format!("{} duration {}", ostr, dur); } | |
932 | if self.keyframe { ostr = format!("{} kf", ostr); } | |
933 | ostr += "]"; | |
934 | write!(f, "{}", ostr) | |
935 | } | |
936 | } | |
937 | ||
/// Constructing or updating an `NAFrame` from an `NAPacket`.
pub trait FrameFromPacket {
    /// Creates a new frame with timing and keyframe flag taken from `pkt`.
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame;
    /// Copies the packet's timing information into `self`.
    fn fill_timestamps(&mut self, pkt: &NAPacket);
}
942 | ||
impl FrameFromPacket for NAFrame {
    // The frame type starts as FrameType::Other; presumably the decoder sets
    // the real type afterwards via set_frame_type() — confirm with callers.
    fn new_from_pkt(pkt: &NAPacket, info: NACodecInfoRef, buf: NABufferType) -> NAFrame {
        NAFrame::new(pkt.ts, FrameType::Other, pkt.keyframe, info, HashMap::new(), buf)
    }
    // Overwrites this frame's pts/dts/duration with the packet's values.
    fn fill_timestamps(&mut self, pkt: &NAPacket) {
        self.ts = pkt.get_time_information();
    }
}
951 |