1 use std::thread::JoinHandle;
2 use std::sync::mpsc::{Receiver, SyncSender, TrySendError};
5 use sdl2::render::Texture;
7 use nihav_core::frame::{NABufferType, NAVideoBuffer};
8 use nihav_core::formats::*;
9 use nihav_core::codecs::*;
10 use nihav_core::scale::*;
12 use super::{DecoderStuff, DecoderType, DecoderState, DecodingState, DispQueue, FrameRecord, PktSendEvent, FRAME_QUEUE_LEN};
// Shared state flag coordinating the video-decoding thread with the control
// side (normal / waiting / prefetch / flush / end / error signalling).
14 static VDEC_STATE: DecoderState = DecoderState::new();

// Capacity of the YUV/RGB frame pools; also sizes the decoded-frame channel
// (which is created with FRAME_QUEUE_SIZE - 1 buffered slots below).
16 pub const FRAME_QUEUE_SIZE: usize = 25;
// Packed 24-bit RGB pixel-format descriptor used as the non-YUV conversion
// target (matches an SDL RGB24-style texture: 3 bytes per pixel, no alpha,
// little-endian, no palette).
18 pub const SDL_RGB_FMT: NAPixelFormaton = NAPixelFormaton { model: ColorModel::RGB(RGBSubmodel::RGB), components: 3,
// R, G, B components: full resolution (no subsampling), 8 bits each, packed
// 3 bytes apart at byte offsets 0, 1 and 2 respectively.
20 Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 0, next_elem: 3 }),
21 Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 1, next_elem: 3 }),
22 Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 2, next_elem: 3 }),
24 ], elem_size: 3, be: false, alpha: false, palette: false };
// Video decoding front-end: owns the decoder (plus reorderer), a scaler, and
// two output frame pools used to hand converted frames to the display side.
26 pub struct VideoDecoder {
// Pool of YUV420 output frames, used when the source format is YUV.
27 yuv_pool: NAVideoBufferPool<u8>,
// Pool of packed RGB24 output frames, used for non-YUV sources.
28 rgb_pool: NAVideoBufferPool<u8>,
// Target video descriptions for the two conversion paths (YUV420 @ 12 bpp,
// RGB24 @ 24 bpp).  NOTE(review): fields 29-35 of this struct are not
// visible in this chunk (decoder, scaler, ifmt, timebase, ...).
36 oinfo_yuv: NAVideoInfo,
37 oinfo_rgb: NAVideoInfo,
// Creates the decoding front-end for a `width` x `height` stream with the
// given timebase (`tb_num` / `tb_den`), taking ownership of the decoder.
41 pub fn new(width: usize, height: usize, tb_num: u32, tb_den: u32, dec: DecoderStuff) -> Self {
// Scaler targets and pool frame descriptions for both output paths.
42 let ofmt_rgb = ScaleInfo { width, height, fmt: SDL_RGB_FMT };
43 let ofmt_yuv = ScaleInfo { width, height, fmt: YUV420_FORMAT };
44 let oinfo_rgb = NAVideoInfo { width, height, flipped: false, format: SDL_RGB_FMT, bits: 24 };
45 let oinfo_yuv = NAVideoInfo { width, height, flipped: false, format: YUV420_FORMAT, bits: 12 };
47 yuv_pool: NAVideoBufferPool::new(FRAME_QUEUE_SIZE),
48 rgb_pool: NAVideoBufferPool::new(FRAME_QUEUE_SIZE),
50 dec, ofmt_yuv, ofmt_rgb, oinfo_yuv, oinfo_rgb,
// Placeholder RGB->RGB scaler; convert_buf() rebuilds it as soon as the
// real input format of the first decoded frame is known.
51 scaler: NAScale::new(ofmt_rgb, ofmt_rgb).expect("creating scaler failed"),
// Deliberately impossible input format (0x0) so the first frame always
// triggers the scaler rebuild in convert_buf().
52 ifmt: NAVideoInfo { width: 0, height: 0, flipped: false, format: SDL_RGB_FMT, bits: 24 },
// Converts a decoded video buffer into a pooled output frame (YUV420 for YUV
// sources, packed RGB24 otherwise) and pairs it with a millisecond timestamp.
// Returns None if the scaler conversion fails; presumably the not-visible
// break paths inside the wait loops also return early on flush — TODO confirm.
55 fn convert_buf(&mut self, bt: NABufferType, ts: u64) -> Option<FrameRecord> {
56 let vinfo = bt.get_video_info().expect("this should be a video buffer");
// Rebuild the scaler whenever the incoming frame's geometry or pixel
// format differs from what the current scaler was built for.
57 if self.ifmt.get_width() != vinfo.get_width() ||
58 self.ifmt.get_height() != vinfo.get_height() ||
59 self.ifmt.get_format() != vinfo.get_format() {
61 let sc_ifmt = ScaleInfo { width: self.ifmt.get_width(), height: self.ifmt.get_height(), fmt: self.ifmt.get_format() };
// YUV inputs are scaled into the YUV420 pool, everything else into RGB24.
62 let do_yuv = if let ColorModel::YUV(_) = self.ifmt.get_format().get_model() { true } else { false };
63 let ofmt = if do_yuv { self.ofmt_yuv } else { self.ofmt_rgb };
64 self.scaler = NAScale::new(sc_ifmt, ofmt).expect("scaling should not fail");
// Grab a free frame from the matching pool, busy-waiting (with a flush
// check) until the display side returns one.
66 let mut opic = if let ColorModel::YUV(_) = self.ifmt.get_format().get_model() {
67 self.yuv_pool.prealloc_video(self.oinfo_yuv, 2).expect("video frame pool allocation failure");
68 while self.yuv_pool.get_free().is_none() {
69 if VDEC_STATE.is_flushing() {
72 std::thread::yield_now();
74 NABufferType::Video(self.yuv_pool.get_free().expect("video frame pool should have a free frame"))
76 self.rgb_pool.prealloc_video(self.oinfo_rgb, 0).expect("video frame pool allocation failure");
77 while self.rgb_pool.get_free().is_none() {
78 if VDEC_STATE.is_flushing() {
81 std::thread::yield_now();
83 NABufferType::VideoPacked(self.rgb_pool.get_free().expect("video frame pool should have a free frame"))
85 let ret = self.scaler.convert(&bt, &mut opic);
// A scaling failure is logged and the frame dropped rather than panicking.
86 if ret.is_err() { println!(" scaler error {:?}", ret.err()); return None; }
// Convert the stream timestamp into milliseconds using the stream timebase.
88 let time = NATimeInfo::ts_to_time(ts, 1000, self.tb_num, self.tb_den);
// Feeds one packet to the decoder and returns at most one reordered,
// converted frame (additional ready frames are picked up by more_frames()).
91 pub fn next_frame(&mut self, pkt: &NAPacket) -> Option<FrameRecord> {
// Single-threaded decoder path: decode, push through the reorderer, and
// convert the first frame that comes out.
93 DecoderType::Video(ref mut vdec, ref mut reord) => {
// NOTE(review): a decode error is silently dropped here (no log) —
// confirm that is intentional, the MT path below does log errors.
94 if let Ok(frm) = vdec.decode(&mut self.dec.dsupp, pkt) {
96 while let Some(frm) = reord.get_frame() {
97 let bt = frm.get_buffer();
// Skip placeholder entries with no actual picture data.
98 if let NABufferType::None = bt { continue; }
// Prefer DTS for display ordering, falling back to PTS, then 0.
99 let ts = frm.get_dts().unwrap_or_else(|| frm.get_pts().unwrap_or(0));
100 return self.convert_buf(bt, ts);
// Multi-threaded decoder path: register the frame with the reorderer,
// queue the packet, and drain decoder output while it is busy.
104 DecoderType::VideoMT(ref mut vdec, ref mut reord) => {
105 let queue_id = reord.register_frame();
106 match vdec.queue_pkt(&mut self.dec.dsupp, &pkt, queue_id) {
// Decoder is saturated: drain finished frames until it can accept
// more input, then retry queueing the packet once.
109 while !vdec.can_take_input() || vdec.has_output() {
110 match vdec.get_frame() {
112 reord.add_frame(frm, id);
115 reord.drop_frame(id);
// Missing references are expected after seeking; anything else
// is reported.
116 if err != DecoderError::MissingReference {
117 println!("frame {} decoding error {:?}", id, err);
122 match vdec.queue_pkt(&mut self.dec.dsupp, &pkt, queue_id) {
// Still full after draining: give up and flag a decoder error.
125 println!("still can't queue frame!");
126 VDEC_STATE.set_state(DecodingState::Error);
128 Err(err) => println!("queueing error {:?}", err),
131 Err(err) => println!("queueing error {:?}", err),
// Return the first reordered frame that carries real picture data.
133 while let Some(frm) = reord.get_frame() {
134 let bt = frm.get_buffer();
135 if let NABufferType::None = bt { continue; }
136 let ts = frm.get_dts().unwrap_or_else(|| frm.get_pts().unwrap_or(0));
137 return self.convert_buf(bt, ts);
// This front-end is only ever built around a video decoder.
140 _ => panic!("not a video decoder!"),
// Retrieves further frames already decoded (or, for the MT decoder, pending
// inside it) without feeding new packets.  With `do_not_wait == false` the MT
// path performs one extra, possibly blocking, get_frame() when nothing was
// immediately available.
144 pub fn more_frames(&mut self, do_not_wait: bool) -> Option<FrameRecord> {
// Single-threaded path: only drain the reorderer.
146 DecoderType::Video(ref mut _dec, ref mut reord) => {
147 while let Some(frm) = reord.get_frame() {
148 let bt = frm.get_buffer();
149 if let NABufferType::None = bt { continue; }
150 let ts = frm.get_dts().unwrap_or_else(|| frm.get_pts().unwrap_or(0));
151 return self.convert_buf(bt, ts);
// Multi-threaded path: first collect everything the decoder has ready.
154 DecoderType::VideoMT(ref mut vdec, ref mut reord) => {
155 let mut got_some = false;
156 while vdec.has_output() {
157 match vdec.get_frame() {
159 reord.add_frame(frm, id);
163 reord.drop_frame(id);
// Missing references are tolerated (expected after seeking).
164 if err != DecoderError::MissingReference {
165 println!("frame {} decoding error {:?}", id, err);
// Nothing was ready and the caller allows waiting: poll once more;
// NoFrame is treated as "nothing pending", not an error.
170 if !got_some && !do_not_wait {
171 match vdec.get_frame() {
173 reord.add_frame(frm, id);
175 (Err(DecoderError::NoFrame), _) => {},
177 reord.drop_frame(id);
178 if err != DecoderError::MissingReference {
179 println!("frame {} decoding error {:?}", id, err);
// Hand back the first reordered frame with real picture data.
184 while let Some(frm) = reord.get_frame() {
185 let bt = frm.get_buffer();
186 if let NABufferType::None = bt { continue; }
187 let ts = frm.get_dts().unwrap_or_else(|| frm.get_pts().unwrap_or(0));
188 return self.convert_buf(bt, ts);
// End-of-stream drain: pulls the frames still held by the reorderer
// (get_last_frames() releases frames that were being kept back for
// reordering) and converts the first displayable one.
195 pub fn last_frame(&mut self) -> Option<FrameRecord> {
197 DecoderType::Video(ref mut _dec, ref mut reord) => {
198 while let Some(frm) = reord.get_last_frames() {
199 let bt = frm.get_buffer();
200 if let NABufferType::None = bt { continue; }
201 let ts = frm.get_dts().unwrap_or_else(|| frm.get_pts().unwrap_or(0));
202 return self.convert_buf(bt, ts);
// Same drain for the multi-threaded decoder's reorderer.
205 DecoderType::VideoMT(ref mut _dec, ref mut reord) => {
206 while let Some(frm) = reord.get_last_frames() {
207 let bt = frm.get_buffer();
208 if let NABufferType::None = bt { continue; }
209 let ts = frm.get_dts().unwrap_or_else(|| frm.get_pts().unwrap_or(0));
210 return self.convert_buf(bt, ts);
// Discards all in-flight decoding state (decoder + reorderer) for both
// decoder variants; used when seeking.  Bodies of the two arms are not
// visible in this chunk.
217 pub fn flush(&mut self) {
219 DecoderType::Video(ref mut dec, ref mut reord) => {
223 DecoderType::VideoMT(ref mut dec, ref mut reord) => {
// Spawns the "vdecoder" worker thread: an event loop that receives packets
// and control events on `vprecv`, decodes/converts frames, and pushes the
// results (buffer + millisecond timestamp) into `vfsend` for the display side.
232 fn start_video_decoding(width: usize, height: usize, tb_num: u32, tb_den: u32, video_dec: DecoderStuff, vprecv: Receiver<PktSendEvent>, vfsend: SyncSender<(NABufferType, u64)>) -> JoinHandle<()> {
233 std::thread::Builder::new().name("vdecoder".to_string()).spawn(move ||{
234 VDEC_STATE.set_state(DecodingState::Waiting);
235 let mut vdec = VideoDecoder::new(width, height, tb_num, tb_den, video_dec);
236 let mut skip_mode = FrameSkipMode::None;
238 match vprecv.recv() {
// New packet: decode it (unless a flush is in progress) and forward
// every frame that becomes ready without blocking.
239 Ok(PktSendEvent::Packet(pkt)) => {
240 if !VDEC_STATE.is_flushing() {
241 if let Some((buf, time)) = vdec.next_frame(&pkt) {
242 vfsend.send((buf, time)).expect("video frame should be sent");
244 while let Some((buf, time)) = vdec.more_frames(true) {
245 vfsend.send((buf, time)).expect("video frame should be sent");
// Prefetch request: block until the decoder yields its pending
// frames, then report back by switching to Waiting.
249 Ok(PktSendEvent::GetFrames) => {
250 while let Some((buf, time)) = vdec.more_frames(false) {
251 vfsend.send((buf, time)).expect("video frame should be sent");
253 VDEC_STATE.set_state(DecodingState::Waiting);
// Seek flush: (body partly not visible) ends with Waiting state.
255 Ok(PktSendEvent::Flush) => {
257 VDEC_STATE.set_state(DecodingState::Waiting);
// Normal end of stream: drain the reorderer while both pools still
// have free frames (i.e. the display side is still consuming).
259 Ok(PktSendEvent::End) => {
260 while vdec.yuv_pool.get_free().is_some() && vdec.rgb_pool.get_free().is_some() {
261 if let Some(frm) = vdec.last_frame() {
262 vfsend.send(frm).expect("video frame should be sent");
267 VDEC_STATE.set_state(DecodingState::End);
// Immediate shutdown: skip draining entirely.
270 Ok(PktSendEvent::ImmediateEnd) => {
271 VDEC_STATE.set_state(DecodingState::End);
// Playback is lagging: step up the decoder's frame-skip mode
// (only supported on the single-threaded decoder here).
274 Ok(PktSendEvent::HurryUp) => {
275 skip_mode = skip_mode.advance();
276 if let DecoderType::Video(ref mut dec, ref mut _reord) = vdec.dec.dec {
277 dec.set_options(&[NAOption{
278 name: FRAME_SKIP_OPTION,
279 value: NAValue::String(skip_mode.to_string()),
// Returns the next frame-skip mode in the escalation cycle (trait header is
// not visible in this chunk; presumably `trait Advance`).
292 fn advance(&self) -> Self;
// Frame-skip escalation used by the HurryUp handler: each call moves one step
// along None -> KeyframesOnly -> IntraOnly and then wraps back to None.
295 impl Advance for FrameSkipMode {
296 fn advance(&self) -> Self {
298 FrameSkipMode::None => FrameSkipMode::KeyframesOnly,
299 FrameSkipMode::KeyframesOnly => FrameSkipMode::IntraOnly,
300 FrameSkipMode::IntraOnly => FrameSkipMode::None,
// Copies a planar YUV420 frame into a locked SDL YUV texture: the luma plane
// first, then the two half-resolution chroma planes packed back to back.
305 fn output_yuv(yuv_texture: &mut Texture, buf: &NAVideoBuffer<u8>, width: usize, height: usize) {
306 let src = buf.get_data();
307 let ysstride = buf.get_stride(0);
308 let ysrc = &src[buf.get_offset(0)..];
// NOTE(review): the chroma planes are deliberately crossed here — buffer
// plane 2 feeds `usrc` and plane 1 feeds `vsrc` — presumably to match the
// plane order the SDL YUV texture format expects (YV12 vs. the decoder's
// U/V order).  Confirm against the texture's pixel format.
309 let usstride = buf.get_stride(2);
310 let usrc = &src[buf.get_offset(2)..];
311 let vsstride = buf.get_stride(1);
312 let vsrc = &src[buf.get_offset(1)..];
313 yuv_texture.with_lock(None, |buffer: &mut [u8], pitch: usize| {
// Copy per line, clamping to the narrower of texture pitch and frame
// width so neither side is overrun.
314 let csize = pitch.min(width);
315 for (dline, sline) in buffer.chunks_exact_mut(pitch).take(height).zip(ysrc.chunks_exact(ysstride)) {
316 dline[..csize].copy_from_slice(&sline[..csize]);
// First chroma plane starts right after the luma plane; chroma lines
// are half the pitch and half the height.
318 let coff = pitch * height;
319 let csize = (pitch / 2).min(width / 2);
320 for (dline, sline) in buffer[coff..].chunks_exact_mut(pitch / 2).take(height/2).zip(vsrc.chunks(vsstride)) {
321 dline[..csize].copy_from_slice(&sline[..csize]);
// Second chroma plane follows the first.
323 let coff = pitch * height + (pitch / 2) * (height / 2);
324 for (dline, sline) in buffer[coff..].chunks_exact_mut(pitch / 2).take(height/2).zip(usrc.chunks(usstride)) {
325 dline[..csize].copy_from_slice(&sline[..csize]);
327 }).expect("surface should be locked");
// Player-side handle for the video decoding thread: owns the packet channel,
// the decoded-frame channel, a local overflow queue for events that could not
// be sent yet, and the thread's join handle.
331 pub struct VideoControl {
// Events waiting locally because the packet channel was full.
332 vqueue: Vec<PktSendEvent>,
// Sends packets/control events to the decoding thread (rendezvous channel).
333 vpsend: SyncSender<PktSendEvent>,
// Receives converted frames (buffer + timestamp) from the decoding thread.
334 vfrecv: Receiver<FrameRecord>,
336 vthread: JoinHandle<()>,
// Creates the channels and spawns the decoding thread.  When the stream has
// no video decoder a dummy thread is spawned instead that just consumes
// events until told to end, so the rest of the player logic stays uniform.
340 pub fn new(video_dec: Option<DecoderStuff>, width: usize, height: usize, tb_num: u32, tb_den: u32) -> Self {
// Zero-capacity (rendezvous) packet channel: each send blocks until the
// decoder thread is ready to take the event.
341 let (vpsend, vprecv) = std::sync::mpsc::sync_channel::<PktSendEvent>(0);
// Frame channel holds one less than the pool size, keeping at least one
// pool frame free for the decoder to write into.
342 let (vfsend, vfrecv) = std::sync::mpsc::sync_channel::<FrameRecord>(FRAME_QUEUE_SIZE - 1);
344 VDEC_STATE.set_state(DecodingState::Normal);
346 let vthread = if let Some(video_dec) = video_dec {
347 start_video_decoding(width, height, tb_num, tb_den, video_dec, vprecv, vfsend)
// No video stream: dummy consumer that waits for an end event.
349 thread::Builder::new().name("vdecoder-dummy".to_string()).spawn(move ||{
351 match vprecv.recv() {
352 Ok(PktSendEvent::End) => break,
353 Ok(PktSendEvent::ImmediateEnd) => break,
360 VDEC_STATE.set_state(DecodingState::End);
// NOTE(review): the local queue is sized with FRAME_QUEUE_LEN while the
// pools/channel use FRAME_QUEUE_SIZE — confirm the distinction is intended.
366 vqueue: Vec::with_capacity(FRAME_QUEUE_LEN),
// Seek flush: switch the shared state to Flush, unblock the decoder thread by
// draining frames it may be blocked sending, deliver the Flush event, then
// discard whatever frames were still queued.
372 pub fn flush(&mut self) {
374 VDEC_STATE.set_state(DecodingState::Flush);
// Pop one frame so a decoder blocked on a full frame channel can proceed.
376 let _ = self.vfrecv.try_recv();
378 let _ = self.vpsend.send(PktSendEvent::Flush);
// Drop all remaining stale frames from before the seek.
379 while self.vfrecv.try_recv().is_ok() { }
// Number of events currently waiting in the local (not-yet-sent) queue.
381 pub fn get_queue_size(&self) -> usize { self.vqueue.len() }
// True when the local event backlog has reached `size` entries.
382 pub fn is_filled(&self, size: usize) -> bool {
383 self.vqueue.len() >= size
// Attempts to hand an event to the decoder thread.  If a backlog already
// exists the event is appended to it instead of being sent directly, which
// preserves delivery order.  Returns true if it got through immediately.
385 pub fn try_send_video(&mut self, evt: PktSendEvent) -> bool {
386 if self.vqueue.len() > 0 {
387 self.vqueue.push(evt);
390 self.try_send_event(evt)
// Non-blocking send; on a full channel the event is put back at the FRONT of
// the local queue so ordering is kept.  NOTE(review): `insert(0, _)` on a Vec
// is O(n) — a VecDeque would avoid that, but the queue is small in practice.
393 fn try_send_event(&mut self, evt: PktSendEvent) -> bool {
394 if let Err(TrySendError::Full(evt)) = self.vpsend.try_send(evt) {
395 self.vqueue.insert(0, evt);
// Drains the local backlog in FIFO order, stopping at the first event that
// cannot be delivered (try_send_event puts it back at the front).  Returns
// true once the backlog is empty.
401 pub fn try_send_queued(&mut self) -> bool {
402 while !self.vqueue.is_empty() {
403 let pkt = self.vqueue.remove(0);
404 if !self.try_send_event(pkt) {
// True once the decoding thread has reached a terminal state (finished
// normally or aborted with an error).
410 pub fn is_video_end(&self) -> bool {
411 matches!(VDEC_STATE.get_state(), DecodingState::End | DecodingState::Error)
// Prefetch: asks the decoder thread to flush out its pending frames and
// spin-waits (yielding) until the thread reports back by leaving the
// Prefetch state.  Ok(()) when the thread reached Waiting; Err(()) on the
// not-visible error path — TODO confirm.
413 pub fn wait_for_frames(&mut self) -> Result<(), ()> {
414 VDEC_STATE.set_state(DecodingState::Prefetch);
415 self.try_send_event(PktSendEvent::GetFrames);
// Keep pumping the local backlog until the GetFrames event is delivered.
416 while !self.try_send_queued() {
419 match VDEC_STATE.get_state() {
// Decoder finished prefetching: resume normal decoding.
420 DecodingState::Waiting => {
421 VDEC_STATE.set_state(DecodingState::Normal);
424 DecodingState::Prefetch => thread::yield_now(),
// Whether the most recently filled frame used the YUV texture path.
430 pub fn is_yuv(&self) -> bool { self.do_yuv }
// Moves decoded frames from the channel into the display queue, uploading
// each one into the pre-allocated RGB or YUV texture of a queue slot and
// recording its timestamp.
432 pub fn fill(&mut self, disp_queue: &mut DispQueue) {
433 while !disp_queue.is_full() {
434 let is_empty = disp_queue.is_empty();
435 if let Ok((pic, time)) = self.vfrecv.try_recv() {
436 let buf = pic.get_vbuf().expect("video frame should be of u8 type");
// Remember which texture kind this frame targets (checked by is_yuv()).
437 self.do_yuv = buf.get_info().get_format().get_model().is_yuv();
438 let idx = disp_queue.end;
439 disp_queue.move_end();
440 let frm = &mut disp_queue.pool[idx];
// RGB path: copy the packed frame line by line into the locked
// texture, clamping each line to the smaller of the two strides.
442 let sstride = buf.get_stride(0);
443 let src = buf.get_data();
444 frm.rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
445 let csize = sstride.min(pitch);
446 for (dst, src) in buffer.chunks_mut(pitch).zip(src.chunks(sstride)) {
447 (&mut dst[..csize]).copy_from_slice(&src[..csize]);
450 }).expect("surface should be locked");
// YUV path: planar upload handled by the shared helper.
452 output_yuv(&mut frm.yuv_tex, &buf, disp_queue.width, disp_queue.height);
455 frm.is_yuv = self.do_yuv;
// Track the queue's covered time range; first_ts is presumably only
// set when the queue was empty (guard lines not visible here).
458 disp_queue.first_ts = time;
460 disp_queue.last_ts = time;
// Shuts the decoding thread down: signal Flush so it stops blocking, pop a
// frame in case it is stuck sending, deliver ImmediateEnd, and join.
467 pub fn finish(self) {
468 VDEC_STATE.set_state(DecodingState::Flush);
// Unblock a decoder that is blocked on the full frame channel.
470 let _ = self.vfrecv.try_recv();
472 let _ = self.vpsend.send(PktSendEvent::ImmediateEnd);
473 self.vthread.join().unwrap();