]>
Commit | Line | Data |
---|---|---|
1 | extern crate sdl2; | |
2 | extern crate nihav_core; | |
3 | extern crate nihav_registry; | |
4 | extern crate nihav_allstuff; | |
5 | ||
6 | use std::env; | |
7 | use std::fs::File; | |
8 | use std::io::Write; | |
9 | use std::path::Path; | |
10 | use std::time::{Duration, Instant}; | |
11 | use std::thread; | |
12 | ||
13 | use sdl2::event::{Event, WindowEvent}; | |
14 | use sdl2::keyboard::Keycode; | |
15 | use sdl2::mouse::MouseButton; | |
16 | use sdl2::render::{Canvas, Texture, TextureCreator}; | |
17 | use sdl2::pixels::PixelFormatEnum; | |
18 | use sdl2::video::{Window, WindowContext}; | |
19 | ||
20 | use nihav_registry::detect; | |
21 | use nihav_core::frame::*; | |
22 | use nihav_core::io::byteio::{FileReader, ByteReader}; | |
23 | use nihav_core::reorder::*; | |
24 | use nihav_core::codecs::*; | |
25 | use nihav_core::demuxers::*; | |
26 | use nihav_registry::register::*; | |
27 | use nihav_allstuff::*; | |
28 | ||
29 | mod audiodec; | |
30 | use audiodec::*; | |
31 | mod videodec; | |
32 | use videodec::*; | |
33 | mod osd; | |
34 | use osd::*; | |
35 | ||
/// Appends the message produced by `$blk` (anything with `.as_bytes()`,
/// i.e. `&str` or `String`) plus a newline to the player's log file.
/// Only compiled in when the `debug` feature is enabled.
#[cfg(feature="debug")]
macro_rules! debug_log {
    ($log: expr; $blk: block) => {
        // write_all() instead of write(): a plain write() may be partial,
        // silently dropping the tail of the log line.
        $log.logfile.write_all($blk.as_bytes()).unwrap();
        $log.logfile.write_all(b"\n").unwrap();
    };
}
/// No-op replacement so call sites compile (and `$blk` is never evaluated)
/// in non-debug builds.
#[cfg(not(feature="debug"))]
macro_rules! debug_log {
    ($log: expr; $blk: block) => {};
}
47 | ||
/// Messages sent from the demuxing/UI thread to the audio and video
/// decoding threads (see `audiodec`/`videodec` for the receiving side).
pub enum PktSendEvent {
    /// A demuxed packet to be decoded.
    Packet(NAPacket),
    /// Request decoded frames from the decoder (semantics defined by the
    /// receiving thread — not visible in this file).
    GetFrames,
    /// Discard queued data; presumably used around seeking — handled in
    /// the decoder threads.
    Flush,
    /// No more packets will arrive (sent on demuxer EOF); finish decoding
    /// what is already queued.
    End,
    /// Stop without draining the queue (receiver-side semantics).
    ImmediateEnd,
    /// Sent on the 'H' key; presumably asks the video decoder to drop
    /// work to catch up — verify against videodec.
    HurryUp,
}
56 | ||
/// The kind of decoder attached to a stream: audio, single-threaded video
/// (paired with a frame reorderer), or multi-threaded video (paired with
/// the multi-threaded reorderer).
pub enum DecoderType {
    Audio(Box<dyn NADecoder + Send>),
    Video(Box<dyn NADecoder + Send>, Box<dyn FrameReorderer + Send>),
    VideoMT(Box<dyn NADecoderMT + Send>, MTFrameReorderer),
}
62 | ||
/// A decoder instance bundled with its support context (`dsupp` carries
/// the frame buffer pools set up during stream selection).
pub struct DecoderStuff {
    pub dsupp: Box<NADecoderSupport>,
    pub dec: DecoderType,
}
67 | ||
/// Formats a millisecond timestamp for the status line.
///
/// Output shape depends on magnitude: `S.d` under a minute,
/// `M:SS.d` under an hour, `H:MM:SS.d` otherwise, where `d`
/// is tenths of a second.
fn format_time(ms: u64) -> String {
    let tenths = (ms % 1000) / 100;
    let total_secs = ms / 1000;
    let secs = total_secs % 60;
    let total_mins = total_secs / 60;
    let mins = total_mins % 60;
    let hours = total_mins / 60;
    if hours > 0 {
        format!("{}:{:02}:{:02}.{}", hours, mins, secs, tenths)
    } else if mins > 0 {
        format!("{}:{:02}.{}", mins, secs, tenths)
    } else {
        format!("{}.{}", secs, tenths)
    }
}
83 | ||
/// Upper bound used for the packet queues and the display frame queue.
const FRAME_QUEUE_LEN: usize = 25;
/// Maximum volume setting, in percent.
const MAX_VOLUME: usize = 200;

/// A decoded frame buffer paired with its display timestamp in milliseconds.
pub type FrameRecord = (NABufferType, u64);
88 | ||
/// Playback clock: the current position is a fixed reference instant
/// plus a millisecond offset, so it can be frozen (pause) and rebased
/// (seek) without drifting.
pub struct TimeKeep {
    base_instant: Instant,
    base_ms: u64,
}

impl TimeKeep {
    /// Starts the clock at zero, referenced to "now".
    fn new() -> Self {
        Self { base_instant: Instant::now(), base_ms: 0 }
    }
    /// Current playback position in milliseconds.
    pub fn get_cur_time(&self) -> u64 {
        self.base_ms + self.base_instant.elapsed().as_millis() as u64
    }
    /// Clears the millisecond offset without touching the reference instant.
    fn reset_ts(&mut self) {
        self.base_ms = 0;
    }
    /// Rebases the clock so the current position reads exactly `ts` ms.
    fn reset_all(&mut self, ts: u64) {
        self.base_instant = Instant::now();
        self.base_ms = ts;
    }
    /// Folds the elapsed time into the offset (called when pausing).
    fn set_ts(&mut self) {
        self.base_ms = self.get_cur_time();
    }
    /// Moves the reference instant to "now" (called when resuming).
    fn set_time(&mut self) {
        self.base_instant = Instant::now();
    }
}
119 | ||
/// One display slot: a presentation timestamp plus both texture flavours.
/// Only the texture selected by `is_yuv` holds the current picture.
pub struct DispFrame<'a> {
    pub ts: u64,            // presentation timestamp in milliseconds
    pub is_yuv: bool,       // which of the two textures is in use
    pub valid: bool,        // whether the slot currently holds a frame
    pub rgb_tex: Texture<'a>,
    pub yuv_tex: Texture<'a>,
}
127 | ||
/// Ring buffer of decoded frames awaiting display.
/// `pool` holds `len + 1` entries: indices `0..len` form the ring proper,
/// while index `len` keeps the most recently shown frame so the window
/// can be repainted on expose events.
pub struct DispQueue<'a> {
    pub pool: Vec<DispFrame<'a>>,
    pub first_ts: u64,      // timestamp of the frame at `start`
    pub last_ts: u64,
    pub start: usize,       // ring read position
    pub end: usize,         // ring write position
    pub len: usize,         // ring capacity (0 for audio-only playback)
    pub width: usize,
    pub height: usize,
}
138 | ||
139 | impl<'a> DispQueue<'a> { | |
140 | fn new(texture_creator: &'a TextureCreator<WindowContext>, width: usize, height: usize, len: usize) -> Self { | |
141 | let mut pool = Vec::with_capacity(len); | |
142 | for _ in 0..len + 1 { | |
143 | let rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).unwrap(); | |
144 | let yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).unwrap(); | |
145 | pool.push(DispFrame{ ts: 0, is_yuv: false, valid: false, rgb_tex, yuv_tex }); | |
146 | } | |
147 | pool[len].is_yuv = false; | |
148 | pool[len].rgb_tex.with_lock(None, |buffer: &mut [u8], _pitch: usize| { | |
149 | for el in buffer.iter_mut() { *el = 0; } | |
150 | }).unwrap(); | |
151 | ||
152 | Self { pool, first_ts: 0, last_ts: 0, start: 0, end: 0, len, width, height } | |
153 | } | |
154 | ||
155 | fn flush(&mut self) { | |
156 | self.start = 0; | |
157 | self.end = 0; | |
158 | self.first_ts = 0; | |
159 | self.last_ts = 0; | |
160 | for frm in self.pool.iter_mut() { | |
161 | frm.valid = false; | |
162 | } | |
163 | } | |
164 | ||
165 | fn get_last_texture(&mut self, osd: &OSD) -> &Texture<'a> { | |
166 | if self.pool[self.len].is_yuv { | |
167 | if osd.is_active() { | |
168 | self.pool[self.len].yuv_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| { | |
169 | osd.draw_yuv(buffer, pitch); | |
170 | }).unwrap(); | |
171 | } | |
172 | &self.pool[self.len].yuv_tex | |
173 | } else { | |
174 | if osd.is_active() { | |
175 | self.pool[self.len].rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| { | |
176 | osd.draw_rgb(buffer, pitch); | |
177 | }).unwrap(); | |
178 | } | |
179 | &self.pool[self.len].rgb_tex | |
180 | } | |
181 | } | |
182 | pub fn is_empty(&self) -> bool { self.start == self.end } | |
183 | pub fn is_full(&self) -> bool { self.len == 0 || self.start == (self.end + 1) % self.len } | |
184 | pub fn move_end(&mut self) { | |
185 | self.end += 1; | |
186 | if self.end >= self.len { | |
187 | self.end -= self.len; | |
188 | } | |
189 | } | |
190 | pub fn move_start(&mut self) { | |
191 | self.pool.swap(self.start, self.len); | |
192 | self.start += 1; | |
193 | if self.start >= self.len { | |
194 | self.start -= self.len; | |
195 | } | |
196 | if !self.is_empty() { | |
197 | self.first_ts = self.pool[self.start].ts; | |
198 | } | |
199 | } | |
200 | } | |
201 | ||
202 | fn try_display(disp_queue: &mut DispQueue, canvas: &mut Canvas<Window>, osd: &mut OSD, ctime: &TimeKeep) -> Option<u64> { | |
203 | while !disp_queue.is_empty() { | |
204 | let disp_time = disp_queue.first_ts; | |
205 | let ctime = ctime.get_cur_time(); | |
206 | if disp_time > ctime + 10 { | |
207 | return Some(disp_time - ctime); | |
208 | } else if disp_time + 10 < ctime { | |
209 | disp_queue.move_start(); | |
210 | } else { | |
211 | if osd.is_active() { | |
212 | osd.prepare(ctime); | |
213 | } | |
214 | let frm = &mut disp_queue.pool[disp_queue.start]; | |
215 | let texture = if frm.is_yuv { | |
216 | if osd.is_active() { | |
217 | frm.yuv_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| { | |
218 | osd.draw_yuv(buffer, pitch); | |
219 | }).unwrap(); | |
220 | } | |
221 | &frm.yuv_tex | |
222 | } else { | |
223 | if osd.is_active() { | |
224 | frm.rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| { | |
225 | osd.draw_rgb(buffer, pitch); | |
226 | }).unwrap(); | |
227 | } | |
228 | &frm.rgb_tex | |
229 | }; | |
230 | canvas.clear(); | |
231 | canvas.copy(texture, None, None).unwrap(); | |
232 | canvas.present(); | |
233 | ||
234 | disp_queue.move_start(); | |
235 | if !disp_queue.is_empty() { | |
236 | return Some((disp_queue.first_ts - ctime).saturating_sub(2)); | |
237 | } else { | |
238 | return None; | |
239 | } | |
240 | } | |
241 | } | |
242 | None | |
243 | } | |
244 | ||
/// Top-level player state: SDL subsystems, the per-stream controllers
/// and the playback flags shared by the UI loop.
struct Player {
    sdl_context: sdl2::Sdl,
    vsystem: sdl2::VideoSubsystem,
    asystem: sdl2::AudioSubsystem,

    // decoding/output controllers for the currently playing file
    acontrol: AudioControl,
    vcontrol: VideoControl,

    // user toggles (-an/-ae/-vn/-ve) and what the current file provides
    play_video: bool,
    play_audio: bool,
    has_video: bool,
    has_audio: bool,
    video_str: u32,     // demuxer stream id of the selected video stream
    audio_str: u32,     // demuxer stream id of the selected audio stream

    vthreads: usize,    // thread count for multi-threaded video decoding
    use_mt: bool,       // prefer multi-threaded decoders when available

    paused: bool,
    mute: bool,
    volume: usize,      // percent, capped at MAX_VOLUME
    end: bool,          // set when the user quits the whole player

    tkeep: TimeKeep,    // playback clock

    debug: bool,        // verbose status line (toggled with 'D')
    osd: OSD,

    #[cfg(feature="debug")]
    logfile: File,      // target of the debug_log! macro
}
276 | ||
277 | impl Player { | |
    /// Initializes SDL (video + audio subsystems) and builds a player
    /// with default settings and empty (stream-less) controllers.
    fn new() -> Self {
        let sdl_context = sdl2::init().unwrap();
        let vsystem = sdl_context.video().unwrap();
        let asystem = sdl_context.audio().unwrap();
        // keep the screen awake while playing (re-enabled on pause)
        vsystem.disable_screen_saver();
        // placeholder controllers; real ones are created per file in play()
        let acontrol = AudioControl::new(None, None, &asystem);
        let vcontrol = VideoControl::new(None, 0, 0, 0, 0);
        Self {
            sdl_context, asystem, vsystem,

            acontrol, vcontrol,

            play_video: true,
            play_audio: true,
            has_video: false,
            has_audio: false,
            video_str: 0,
            audio_str: 0,

            vthreads: 3,
            use_mt: true,

            paused: false,
            mute: false,
            volume: 100,
            end: false,

            tkeep: TimeKeep::new(),

            debug: false,
            osd: OSD::new(),

            #[cfg(feature="debug")]
            logfile: File::create("debug.log").unwrap(),
        }
    }
    /// Seeks `off` seconds forward (`fwd == true`) or backward from the
    /// current position, flushes every queue and refills from the new
    /// position, then resynchronises the playback clock.
    fn seek(&mut self, off: u64, fwd: bool, dmx: &mut Demuxer, disp_queue: &mut DispQueue) {
        let cur_time = self.tkeep.get_cur_time();
        let seektime = if fwd { cur_time + off * 1000 } else {
            cur_time.saturating_sub(off * 1000) };
        debug_log!(self; {format!(" seek to {}", seektime)});

        let ret = dmx.seek(NATimePoint::Milliseconds(seektime));
        if ret.is_err() {
            println!(" seek error");
            return;
        }

        // drop everything queued before the seek point
        self.acontrol.flush();
        self.vcontrol.flush();
        disp_queue.flush();

        self.tkeep.reset_ts();
        self.prefill(dmx, disp_queue);
        // resynchronise the clock: prefer the first queued video frame's
        // timestamp, otherwise wait for the audio side to report a time
        if !disp_queue.is_empty() {
            self.tkeep.reset_all(disp_queue.first_ts);
        } else {
            let mut iterations = 0;
            let mut time = self.acontrol.get_time();
            while time.is_none() {
                iterations += 1;
                std::thread::yield_now();
                // bounded spin: give up eventually rather than hang
                if iterations > 1000000 { println!(" still no time set?!"); break; }
                time = self.acontrol.get_time();
            }
            if let Some(time) = time {
                self.tkeep.reset_all(time);
            }
        }
        if !self.paused {
            self.acontrol.resume();
        }
    }
    /// Pumps packets from the demuxer into the decoder queues until they
    /// are reasonably full, then (for video) waits until the display
    /// queue has frames. Called before playback starts and after a seek.
    fn prefill(&mut self, dmx: &mut Demuxer, disp_queue: &mut DispQueue) {
        debug_log!(self; {" prefilling"});
        while self.vcontrol.get_queue_size() < FRAME_QUEUE_LEN {
            // only demux another packet while neither side is overfull
            let mut try_send = self.acontrol.get_queue_size() < FRAME_QUEUE_LEN && (!self.has_video || (!self.vcontrol.is_filled(FRAME_QUEUE_LEN) && !disp_queue.is_full()));

            // if a controller cannot accept queued packets and is already
            // half full, stop feeding it more
            if !self.vcontrol.try_send_queued() && self.vcontrol.get_queue_size() > FRAME_QUEUE_LEN / 2 {
                try_send = false;
            }
            if !self.acontrol.try_send_queued() && self.acontrol.get_queue_size() > FRAME_QUEUE_LEN / 2 {
                try_send = false;
            }
            if try_send {
                match dmx.get_frame() {
                    Err(DemuxerError::EOF) => break,
                    Err(_) => break,
                    Ok(pkt) => {
                        // route the packet to the stream it belongs to;
                        // packets of unselected streams are dropped
                        let streamno = pkt.get_stream().get_id();
                        if self.has_video && streamno == self.video_str {
                            self.vcontrol.try_send_video(PktSendEvent::Packet(pkt));
                        } else if self.has_audio && streamno == self.audio_str {
                            self.acontrol.try_send_audio(PktSendEvent::Packet(pkt));
                        }
                    }
                };
            }
            self.vcontrol.fill(disp_queue);

            if !try_send {
                break;
            }
        }
        if self.has_video {
            // drain decoded frames into the display queue until it fills
            while self.vcontrol.get_queue_size() > 0 && !disp_queue.is_full() {
                self.vcontrol.try_send_queued();
                self.vcontrol.fill(disp_queue);
                std::thread::sleep(Duration::from_millis(10));
            }
            self.vcontrol.wait_for_frames();
            self.vcontrol.fill(disp_queue);
        }
        debug_log!(self; {format!(" prefilling done, frames {}-{} audio {}", disp_queue.start, disp_queue.end, self.acontrol.get_fill())});
    }
393 | fn toggle_pause(&mut self) { | |
394 | self.paused = !self.paused; | |
395 | if self.paused { | |
396 | self.vsystem.enable_screen_saver(); | |
397 | self.tkeep.set_ts(); | |
398 | } else { | |
399 | self.vsystem.disable_screen_saver(); | |
400 | self.tkeep.set_time(); | |
401 | } | |
402 | if self.paused { | |
403 | self.acontrol.pause(); | |
404 | } else { | |
405 | self.acontrol.resume(); | |
406 | } | |
407 | } | |
    /// Processes all pending SDL events (window, keyboard, mouse).
    /// Returns `true` when playback of the current file should stop:
    /// on quit (`self.end` is also set) or on Return (skip to next file).
    fn handle_events(&mut self, event_pump: &mut sdl2::EventPump, canvas: &mut Canvas<Window>, dmx: &mut Demuxer, disp_queue: &mut DispQueue) -> bool {
        for event in event_pump.poll_iter() {
            if let Event::Quit {..} = event {
                self.end = true;
                println!();
                return true;
            }
            // repaint the last shown frame when the window is re-exposed
            if let Event::Window {win_event: WindowEvent::Exposed, ..} = event {
                canvas.clear();
                canvas.copy(disp_queue.get_last_texture(&self.osd), None, None).unwrap();
                canvas.present();
            }
            if let Event::MouseButtonDown {mouse_btn: MouseButton::Right, ..} = event {
                self.toggle_pause();
            }
            if let Event::KeyDown {keycode: Some(keycode), ..} = event {
                match keycode {
                    // Esc/Q: quit the player entirely
                    Keycode::Escape | Keycode::Q => {
                        self.end = true;
                        println!();
                        return true;
                    },
                    // Return: stop this file, move on to the next argument
                    Keycode::Return => return true,
                    // seeking: left/right 10s, up/down 1min, PgUp/PgDn 10min
                    Keycode::Right => { self.seek(10, true, dmx, disp_queue); },
                    Keycode::Left => { self.seek(10, false, dmx, disp_queue); },
                    Keycode::Up => { self.seek(60, true, dmx, disp_queue); },
                    Keycode::Down => { self.seek(60, false, dmx, disp_queue); },
                    Keycode::PageUp => { self.seek(600, true, dmx, disp_queue); },
                    Keycode::PageDown => { self.seek(600, false, dmx, disp_queue); },
                    Keycode::Space => { self.toggle_pause(); },
                    // volume in 10% steps, clamped to [0, MAX_VOLUME];
                    // changes are not applied while muted
                    Keycode::Plus | Keycode::KpPlus => {
                        self.volume = (self.volume + 10).min(MAX_VOLUME);
                        if !self.mute {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::Minus | Keycode::KpMinus => {
                        self.volume = self.volume.saturating_sub(10);
                        if !self.mute {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    // D: toggle the verbose status line
                    Keycode::D => {
                        self.debug = !self.debug;
                    },
                    // M: mute/unmute, restoring the saved volume
                    Keycode::M => {
                        self.mute = !self.mute;
                        if self.mute {
                            self.acontrol.set_volume(0);
                        } else {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::H => {
                        self.vcontrol.try_send_video(PktSendEvent::HurryUp);
                    },
                    Keycode::O => {
                        self.osd.toggle();
                    },
                    _ => {},
                };
                // wipe the status line after any key press
                if !self.paused {
                    print!("{:60}\r", ' ');
                    std::io::stdout().flush().unwrap();
                }
            }
        }
        false
    }
    /// Plays a single file: detects the container format, sets up the
    /// demuxer and per-stream decoders, opens the output window and runs
    /// the main playback loop until EOF or user interaction stops it.
    fn play(&mut self, name: &str, start_time: NATimePoint) {
        debug_log!(self; {format!("Playing {}", name)});

        // prepare data source
        let path = Path::new(name);
        let mut file = File::open(path).unwrap();
        let dmx_fact;
        let mut fr = FileReader::new_read(&mut file);
        let mut br = ByteReader::new(&mut fr);
        let res = detect::detect_format(name, &mut br);
        if res.is_none() {
            println!("cannot detect format for {}", name);
            return;
        }
        let (dmx_name, _score) = res.unwrap();
        debug_log!(self; {format!(" found demuxer {} with score {:?}", dmx_name, _score)});
        println!("trying demuxer {} on {}", dmx_name, name);

        // register all known demuxers and decoders (MT ones only on demand)
        let mut dmx_reg = RegisteredDemuxers::new();
        nihav_register_all_demuxers(&mut dmx_reg);
        let mut dec_reg = RegisteredDecoders::new();
        nihav_register_all_decoders(&mut dec_reg);
        let mut mtdec_reg = RegisteredMTDecoders::new();
        if self.use_mt {
            nihav_register_all_mt_decoders(&mut mtdec_reg);
        }

        let ret = dmx_reg.find_demuxer(dmx_name);
        if ret.is_none() {
            println!("error finding {} demuxer", dmx_name);
            return;
        }
        dmx_fact = ret.unwrap();
        // format detection consumed part of the stream — rewind first
        br.seek(SeekFrom::Start(0)).unwrap();
        let ret = create_demuxer(dmx_fact, &mut br);
        if ret.is_err() {
            println!("error creating demuxer");
            return;
        }
        let mut dmx = ret.unwrap();
        if start_time != NATimePoint::None {
            debug_log!(self; {format!(" start seek to {}", start_time)});
            if dmx.seek(start_time).is_err() {
                println!("initial seek failed");
            }
        }

        // fallback window size, overridden by the video stream's properties
        let mut width = 640;
        let mut height = 480;
        let mut tb_num = 0;
        let mut tb_den = 0;
        let mut ainfo: Option<NAAudioInfo> = None;

        let mut video_dec: Option<DecoderStuff> = None;
        let mut audio_dec: Option<DecoderStuff> = None;

        let duration = dmx.get_duration();
        if duration != 0 {
            println!(" total duration {}", format_time(duration));
        }
        // pick the first decodable video stream and audio stream
        self.has_video = false;
        self.has_audio = false;
        for i in 0..dmx.get_num_streams() {
            let s = dmx.get_stream(i).unwrap();
            let info = s.get_info();
            let decfunc = dec_reg.find_decoder(info.get_name());
            let decfunc_mt = mtdec_reg.find_decoder(info.get_name());
            println!("stream {} - {} {}", i, s, info.get_name());
            debug_log!(self; {format!(" stream {} - {} {}", i, s, info.get_name())});
            let str_id = s.get_id();
            if info.is_video() {
                if video_dec.is_none() && self.play_video {
                    // prefer a multi-threaded decoder when one exists
                    if let Some(decfunc) = decfunc_mt {
                        let mut dec = (decfunc)();
                        let mut dsupp = Box::new(NADecoderSupport::new());
                        let props = info.get_properties().get_video_info().unwrap();
                        if props.get_width() != 0 {
                            width = props.get_width();
                            height = props.get_height();
                        }
                        if dec.init(&mut dsupp, info.clone(), self.vthreads).is_ok() {
                            video_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::VideoMT(dec, MTFrameReorderer::new()) });
                            self.video_str = str_id;
                            let (tbn, tbd) = s.get_timebase();
                            tb_num = tbn;
                            tb_den = tbd;
                            self.has_video = true;
                            continue;
                        } else {
                            // fall through to the single-threaded path below
                            println!("failed to create multi-threaded decoder, falling back");
                        }
                    }
                    if let Some(decfunc) = decfunc {
                        let mut dec = (decfunc)();
                        let mut dsupp = Box::new(NADecoderSupport::new());
                        let props = info.get_properties().get_video_info().unwrap();
                        if props.get_width() != 0 {
                            width = props.get_width();
                            height = props.get_height();
                        }
                        // codecs with complex frame reordering need a
                        // deeper reorder buffer (16 vs 3 frames)
                        let desc = get_codec_description(info.get_name());
                        let (reorder_depth, reord) = if desc.is_none() || (desc.unwrap().caps & CODEC_CAP_COMPLEX_REORDER) == 0 {
                            let reord: Box<dyn FrameReorderer + Send> = Box::new(IPBReorderer::new());
                            (3, reord)
                        } else {
                            let reord: Box<dyn FrameReorderer + Send> = Box::new(ComplexReorderer::new());
                            (16, reord)
                        };
                        dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth);
                        dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
                        dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
                        dec.init(&mut dsupp, info).unwrap();
                        video_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::Video(dec, reord) });
                        self.video_str = str_id;
                        let (tbn, tbd) = s.get_timebase();
                        tb_num = tbn;
                        tb_den = tbd;
                        self.has_video = true;
                    } else {
                        println!("no video decoder for {} found!", info.get_name());
                    }
                }
            } else if info.is_audio() {
                if audio_dec.is_none() && self.play_audio {
                    if let Some(decfunc) = decfunc {
                        let mut dec = (decfunc)();
                        let mut dsupp = Box::new(NADecoderSupport::new());
                        ainfo = info.get_properties().get_audio_info();
                        dec.init(&mut dsupp, info).unwrap();
                        audio_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::Audio(dec) });
                        self.audio_str = str_id;
                        self.has_audio = true;
                    } else {
                        println!("no audio decoder for {} found!", info.get_name());
                    }
                }
            } else {
                println!("decoder {} not found", info.get_name());
            }
        }
        if !self.has_video && !self.has_audio {
            println!("No playable streams found.");
            return;
        }

        // double small frames up to a reasonable window size
        while (width <= 384) && (height <= 288) {
            width <<= 1;
            height <<= 1;
        }

        // prepare playback structure; the old (dummy) controllers are kept
        // aside and swapped back in before finishing
        let mut new_vcontrol = VideoControl::new(video_dec, width, height, tb_num, tb_den);
        std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);

        let mut new_acontrol = AudioControl::new(audio_dec, ainfo, &self.asystem);
        std::mem::swap(&mut self.acontrol, &mut new_acontrol);

        if self.mute {
            self.acontrol.set_volume(0);
        } else {
            self.acontrol.set_volume(self.volume);
        }

        // window title includes the file name when available
        let fname = path.file_name();
        let wname = if let Some(fname) = fname {
            "NihAV player - ".to_owned() + fname.to_str().unwrap()
        } else {
            "NihAV player".to_owned()
        };
        let window = self.vsystem.window(&wname, width as u32, height as u32)
            .position_centered().build().unwrap();
        let mut canvas = window.into_canvas().build().unwrap();
        let texture_creator = canvas.texture_creator();
        let mut disp_q = DispQueue::new(&texture_creator, width, height, if self.has_video { FRAME_QUEUE_LEN } else { 0 });
        if !self.has_video {
            // audio-only playback: show the static black frame once
            canvas.clear();
            canvas.copy(disp_q.get_last_texture(&self.osd), None, None).unwrap();
            canvas.present();
        }

        self.has_audio = self.acontrol.has_audio();
        if !self.has_video && !self.has_audio {
            println!("No playable streams.");
            return;
        }

        // play
        self.prefill(&mut dmx, &mut disp_q);
        self.tkeep.reset_all(0);
        if !self.paused {
            self.acontrol.resume();
        }
        let mut event_pump = self.sdl_context.event_pump().unwrap();
        let mut last_disp = Instant::now();
        let mut has_data = true;
        'main: loop {
            if self.handle_events(&mut event_pump, &mut canvas, &mut dmx, &mut disp_q) {
                println!();
                break 'main;
            }
            if !self.paused {
                // demux more packets while neither decoder queue is overfull
                let mut try_send = self.acontrol.get_queue_size() < FRAME_QUEUE_LEN && self.vcontrol.get_queue_size() < FRAME_QUEUE_LEN;
                if !self.vcontrol.try_send_queued() && self.vcontrol.is_filled(FRAME_QUEUE_LEN) {
                    try_send = false;
                }
                if !self.acontrol.try_send_queued() {
                    try_send = false;
                }
                while has_data && try_send {
                    match dmx.get_frame() {
                        Err(DemuxerError::EOF) => {
                            // tell both decoders to drain what is queued
                            self.vcontrol.try_send_video(PktSendEvent::End);
                            self.acontrol.try_send_audio(PktSendEvent::End);
                            has_data = false;
                        },
                        Err(err) => { println!("demuxer error {:?}", err); },
                        Ok(pkt) => {
                            let streamno = pkt.get_stream().get_id();
                            if self.has_video && streamno == self.video_str {
                                debug_log!(self; {" sending video packet"});
                                self.vcontrol.try_send_video(PktSendEvent::Packet(pkt));
                                if self.vcontrol.is_filled(FRAME_QUEUE_LEN) {
                                    try_send = false;
                                }
                            } else if self.has_audio && streamno == self.audio_str {
                                debug_log!(self; {" sending audio packet"});
                                self.acontrol.try_send_audio(PktSendEvent::Packet(pkt));
                                if self.acontrol.get_queue_size() >= FRAME_QUEUE_LEN {
                                    try_send = false;
                                }
                            }
                        }
                    };
                }
                self.vcontrol.fill(&mut disp_q);
                // sleep until the next video frame or audio refill is due
                let mut sleep_time = 25;
                debug_log!(self; {format!(" time {}", self.tkeep.get_cur_time())});
                if self.has_video {
                    debug_log!(self; {format!(" disp queue {}-{}, {}-{} vqueue fill {}", disp_q.first_ts, disp_q.last_ts, disp_q.start, disp_q.end, self.vcontrol.get_queue_size())});
                    let ret = try_display(&mut disp_q, &mut canvas, &mut self.osd, &self.tkeep);
                    if let Some(next_time) = ret {
                        sleep_time = sleep_time.min(next_time);
                    }
                }
                if self.has_audio {
                    let time_left = self.acontrol.get_time_left();
                    debug_log!(self; {format!(" audio left {}", time_left)});
                    sleep_time = sleep_time.min(time_left);
                }
                debug_log!(self; {format!(" sleep {}ms", sleep_time)});
                // refresh the status line at most once per 10 ms
                if last_disp.elapsed().as_millis() >= 10 {
                    let c_time = self.tkeep.get_cur_time();

                    if !self.debug {
                        print!(" {} {}% \r", format_time(c_time), self.acontrol.get_volume());
                    } else {
                        print!(" {} {} {}% {:3} {:6}\r", format_time(c_time), if self.vcontrol.is_yuv() { 'Y' } else { 'R' }, self.acontrol.get_volume(), (disp_q.end + disp_q.len - disp_q.start) % disp_q.len, self.acontrol.get_fill());
                    }
                    std::io::stdout().flush().unwrap();
                    last_disp = Instant::now();
                }
                // finish once every active stream has reached its end
                let mut end = true;
                if self.has_video && !self.vcontrol.is_video_end() {
                    end = false;
                }
                if self.has_audio && !self.acontrol.is_audio_end() {
                    end = false;
                }
                if end {
                    break;
                }
                thread::sleep(Duration::from_millis(sleep_time));
            } else {
                thread::sleep(Duration::from_millis(20));
            }
        }
        println!();
        // restore the dummy controllers and shut the real ones down
        std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);
        new_vcontrol.finish();
        std::mem::swap(&mut self.acontrol, &mut new_acontrol);
        new_acontrol.finish();
    }
759 | } | |
760 | ||
/// Entry point: parses command-line flags and plays every non-flag
/// argument as a media file, in order.
fn main() {
    let args: Vec<String> = env::args().collect();

    if args.len() == 1 {
        println!("usage: nihav-player file1 file2 ...");
        return;
    }

    let mut player = Player::new();

    let mut aiter = args.iter().skip(1);
    // -seek applies only to the next file played, then resets
    let mut seek_time = NATimePoint::None;
    while let Some(arg) = aiter.next() {
        match arg.as_str() {
            // audio/video enable/disable toggles
            "-an" => { player.play_audio = false; },
            "-ae" => { player.play_audio = true; },
            "-vn" => { player.play_video = false; },
            "-ve" => { player.play_video = true; },
            "-seek" => {
                if let Some(arg) = aiter.next() {
                    if let Ok(time) = arg.parse::<NATimePoint>() {
                        seek_time = time;
                    } else {
                        println!("wrong seek time");
                        seek_time = NATimePoint::None;
                    }
                }
            },
            "-vol" => {
                if let Some(arg) = aiter.next() {
                    if let Ok(vol) = arg.parse::<usize>() {
                        player.volume = vol.min(MAX_VOLUME);
                    } else {
                        println!("wrong volume");
                    }
                }
            },
            "-debug" => {
                player.debug = true;
            },
            "-nodebug" => {
                player.debug = false;
            },
            // multi-threaded decoding on/off
            "-mt" => {
                player.use_mt = true;
            },
            "-nomt" => {
                player.use_mt = false;
            },
            "-threads" => {
                if let Some(arg) = aiter.next() {
                    if let Ok(val) = arg.parse::<usize>() {
                        player.vthreads = val.max(1);
                    } else {
                        println!("wrong number of threads");
                    }
                }
            },
            _ => {
                // anything else is a file name; stop if the user quit
                player.play(arg, seek_time);
                if player.end { break; }
                seek_time = NATimePoint::None;
            },
        };
    }
}