]>
Commit | Line | Data |
---|---|---|
69b93cb5 KS |
1 | extern crate sdl2; |
2 | extern crate nihav_core; | |
3 | extern crate nihav_registry; | |
4 | extern crate nihav_allstuff; | |
5 | ||
6 | use std::env; | |
7 | use std::fs::File; | |
8 | use std::io::Write; | |
9 | use std::path::Path; | |
10 | use std::time::{Duration, Instant}; | |
11 | use std::thread; | |
12 | ||
13 | use sdl2::event::{Event, WindowEvent}; | |
14 | use sdl2::keyboard::Keycode; | |
15 | use sdl2::render::{Canvas, Texture, TextureCreator}; | |
16 | use sdl2::pixels::PixelFormatEnum; | |
17 | use sdl2::video::{Window, WindowContext}; | |
18 | ||
19 | use nihav_registry::detect; | |
20 | use nihav_core::frame::*; | |
21 | use nihav_core::io::byteio::{FileReader, ByteReader}; | |
22 | use nihav_core::reorder::*; | |
23 | use nihav_core::codecs::*; | |
24 | use nihav_core::demuxers::*; | |
25 | use nihav_registry::register::*; | |
26 | use nihav_allstuff::*; | |
27 | ||
28 | mod audiodec; | |
29 | use audiodec::*; | |
30 | mod videodec; | |
31 | use videodec::*; | |
32 | ||
// Logging macro: writes one line to the player's log file, followed by a
// newline. The argument block must evaluate to something exposing
// `.as_bytes()` (a `&str` or `String`).
#[cfg(feature="debug")]
macro_rules! debug_log {
    ($log: expr; $blk: block) => {
        $log.logfile.write($blk.as_bytes()).unwrap();
        $log.logfile.write(b"\n").unwrap();
    };
}
// No-op variant so debug_log! call sites compile (and their arguments are not
// evaluated) when the "debug" feature is disabled.
#[cfg(not(feature="debug"))]
macro_rules! debug_log {
    ($log: expr; $blk: block) => {};
}
44 | ||
/// Events sent from the demuxing loop to the audio/video decoding sides.
/// Exact handling lives in the `audiodec`/`videodec` modules (not visible
/// here) — variant descriptions below follow the call sites in this file.
pub enum PktSendEvent {
    /// A demuxed packet for the receiver to decode.
    Packet(NAPacket),
    /// Discard queued data (presumably used around seeks — see receivers).
    Flush,
    /// No more packets will arrive; drain what is queued and finish.
    End,
    /// Stop without draining (NOTE(review): semantics defined by receivers).
    ImmediateEnd,
    /// Sent on the 'H' key; asks the video side to catch up.
    HurryUp,
}
52 | ||
/// Everything needed to decode one stream: the decoder, its support data
/// (frame pools), and a reorderer that restores display order.
pub struct DecoderStuff {
    pub dsupp: Box<NADecoderSupport>,          // decoder support context (buffer pools)
    pub dec: Box<dyn NADecoder + Send>,        // the decoder instance itself
    pub reord: Box<dyn FrameReorderer + Send>, // frame reorderer (IPB/complex/none)
}
58 | ||
/// Renders a millisecond timestamp as `[[h:]m:]s.d` with one decisecond
/// digit; hour and minute fields are omitted while they are zero.
fn format_time(ms: u64) -> String {
    let tenths = (ms % 1000) / 100;
    let total_secs = ms / 1000;
    let secs = total_secs % 60;
    let mins = (total_secs / 60) % 60;
    let hours = total_secs / 3600;
    if hours > 0 {
        format!("{}:{:02}:{:02}.{}", hours, mins, secs, tenths)
    } else if mins > 0 {
        format!("{}:{:02}.{}", mins, secs, tenths)
    } else {
        format!("{}.{}", secs, tenths)
    }
}
74 | ||
/// Target depth for the packet/frame queues between demuxer and decoders.
const FRAME_QUEUE_LEN: usize = 25;
/// Upper bound for the user-adjustable volume (percent).
const MAX_VOLUME: usize = 200;

/// A decoded frame buffer paired with its timestamp (presumably
/// milliseconds, matching the `ts` fields used elsewhere in this file).
pub type FrameRecord = (NABufferType, u64);
79 | ||
/// Playback clock: a base timestamp plus the wall-clock time elapsed since
/// that base was recorded.
pub struct TimeKeep {
    ref_time: Instant, // wall-clock moment the base timestamp was taken
    ref_ts: u64,       // base playback position, in milliseconds
}

impl TimeKeep {
    fn new() -> Self {
        Self { ref_time: Instant::now(), ref_ts: 0 }
    }
    /// Current playback position in milliseconds: base plus elapsed time.
    pub fn get_cur_time(&self) -> u64 {
        self.ref_ts + self.ref_time.elapsed().as_millis() as u64
    }
    /// Rewinds the base timestamp to zero, leaving the reference instant alone.
    fn reset_ts(&mut self) {
        self.ref_ts = 0;
    }
    /// Restarts the clock from the given position.
    fn reset_all(&mut self, ts: u64) {
        self.ref_time = Instant::now();
        self.ref_ts = ts;
    }
    /// Freezes the current position into the base (callers use this on pause).
    fn set_ts(&mut self) {
        self.ref_ts = self.get_cur_time();
    }
    /// Restarts elapsed-time measurement from now (callers use this on resume).
    fn set_time(&mut self) {
        self.ref_time = Instant::now();
    }
}
110 | ||
/// One pooled output frame: a timestamp plus both texture flavours; only the
/// texture selected by `is_yuv` holds the current picture.
pub struct DispFrame<'a> {
    pub ts: u64,      // presentation timestamp in ms (compared to TimeKeep)
    pub is_yuv: bool, // which of the two textures carries the picture
    pub valid: bool,  // cleared on flush; set by the video filler — see videodec
    pub rgb_tex: Texture<'a>,
    pub yuv_tex: Texture<'a>,
}
118 | ||
/// Ring buffer of decoded frames waiting to be shown. `pool` holds `len + 1`
/// entries: indices `0..len` form the ring, while `pool[len]` is a spare slot
/// keeping the most recently displayed frame for window repaints.
pub struct DispQueue<'a> {
    pub pool: Vec<DispFrame<'a>>,
    pub first_ts: u64, // timestamp of the frame at `start`
    pub last_ts: u64,  // newest queued timestamp (updated by the filler — see videodec)
    pub start: usize,  // read index: next frame to display
    pub end: usize,    // write index
    pub len: usize,    // ring capacity (0 for audio-only playback)
    pub width: usize,
    pub height: usize,
}
129 | ||
impl<'a> DispQueue<'a> {
    /// Creates a queue with `len` ring slots plus one spare slot (index `len`)
    /// that is pre-filled with a black RGB frame for initial repaints.
    fn new(texture_creator: &'a TextureCreator<WindowContext>, width: usize, height: usize, len: usize) -> Self {
        // NOTE(review): pool ends up holding len + 1 entries, so this capacity
        // triggers exactly one regrow
        let mut pool = Vec::with_capacity(len);
        for _ in 0..len + 1 {
            let rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).unwrap();
            // IYUV subsampling needs even dimensions, hence rounding up to even
            let yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).unwrap();
            pool.push(DispFrame{ ts: 0, is_yuv: false, valid: false, rgb_tex, yuv_tex });
        }
        // paint the spare slot black so an exposed window shows black, not garbage
        pool[len].is_yuv = false;
        pool[len].rgb_tex.with_lock(None, |buffer: &mut [u8], _pitch: usize| {
            for el in buffer.iter_mut() { *el = 0; }
        }).unwrap();

        Self { pool, first_ts: 0, last_ts: 0, start: 0, end: 0, len, width, height }
    }

    /// Empties the queue and invalidates every pooled frame (used after seeks).
    fn flush(&mut self) {
        self.start = 0;
        self.end = 0;
        self.first_ts = 0;
        self.last_ts = 0;
        for frm in self.pool.iter_mut() {
            frm.valid = false;
        }
    }

    /// Texture of the most recently displayed frame (the spare slot), used to
    /// repaint the window after exposure events.
    fn get_last_texture(&self) -> &Texture<'a> {
        if self.pool[self.len].is_yuv {
            &self.pool[self.len].yuv_tex
        } else {
            &self.pool[self.len].rgb_tex
        }
    }
    pub fn is_empty(&self) -> bool { self.start == self.end }
    // one ring slot stays unused so "full" is distinguishable from "empty";
    // a zero-length queue (audio-only) is always considered full
    pub fn is_full(&self) -> bool { self.len == 0 || self.start == (self.end + 1) % self.len }
    /// Advances the write index around the ring.
    pub fn move_end(&mut self) {
        self.end += 1;
        if self.end >= self.len {
            self.end -= self.len;
        }
    }
    /// Consumes the head frame: swaps it into the spare slot (so it can be
    /// redrawn later), advances the read index and refreshes `first_ts`.
    pub fn move_start(&mut self) {
        self.pool.swap(self.start, self.len);
        self.start += 1;
        if self.start >= self.len {
            self.start -= self.len;
        }
        if !self.is_empty() {
            self.first_ts = self.pool[self.start].ts;
        }
    }
}
182 | ||
183 | fn try_display(disp_queue: &mut DispQueue, canvas: &mut Canvas<Window>, ctime: &TimeKeep) -> Option<u64> { | |
184 | while !disp_queue.is_empty() { | |
185 | let disp_time = disp_queue.first_ts; | |
186 | let ctime = ctime.get_cur_time(); | |
187 | if disp_time > ctime + 10 { | |
188 | return Some(disp_time - ctime); | |
189 | } else if disp_time + 10 < ctime { | |
190 | disp_queue.move_start(); | |
191 | } else { | |
192 | let frm = &disp_queue.pool[disp_queue.start]; | |
193 | let texture = if frm.is_yuv { &frm.yuv_tex } else { &frm.rgb_tex }; | |
194 | canvas.clear(); | |
195 | canvas.copy(texture, None, None).unwrap(); | |
196 | canvas.present(); | |
197 | ||
198 | disp_queue.move_start(); | |
199 | if !disp_queue.is_empty() { | |
200 | return Some((disp_queue.first_ts - ctime).saturating_sub(2)); | |
201 | } else { | |
202 | return None; | |
203 | } | |
204 | } | |
205 | } | |
206 | None | |
207 | } | |
208 | ||
/// Top-level player state, shared across all files played in one session.
struct Player {
    sdl_context: sdl2::Sdl,
    vsystem: sdl2::VideoSubsystem,
    asystem: sdl2::AudioSubsystem,

    // per-file pipelines; replaced via mem::swap at the start of play()
    acontrol: AudioControl,
    vcontrol: VideoControl,

    // user preference: whether to decode each stream type at all
    play_video: bool,
    play_audio: bool,
    // whether the current file actually has a playable stream of that type
    has_video: bool,
    has_audio: bool,
    // demuxer stream ids selected for playback
    video_str: u32,
    audio_str: u32,

    paused: bool,
    mute: bool,
    volume: usize, // percent, capped at MAX_VOLUME
    end: bool,     // set when the user quits the whole player

    tkeep: TimeKeep, // playback clock

    debug: bool, // verbose console status line toggle (the 'D' key)

    #[cfg(feature="debug")]
    logfile: File, // destination for debug_log!
}
236 | ||
237 | impl Player { | |
    /// Initializes SDL (video + audio), turns the screen saver off for the
    /// session and constructs a player with idle pipelines and default state.
    fn new() -> Self {
        let sdl_context = sdl2::init().unwrap();
        let vsystem = sdl_context.video().unwrap();
        let asystem = sdl_context.audio().unwrap();
        // keep the display awake while the player runs; re-enabled on pause
        vsystem.disable_screen_saver();
        // placeholder pipelines; real ones are built per file in play()
        let acontrol = AudioControl::new(None, None, &asystem);
        let vcontrol = VideoControl::new(None, 0, 0, 0, 0);
        Self {
            sdl_context, asystem, vsystem,

            acontrol, vcontrol,

            play_video: true,
            play_audio: true,
            has_video: false,
            has_audio: false,
            video_str: 0,
            audio_str: 0,

            paused: false,
            mute: false,
            volume: 100,
            end: false,

            tkeep: TimeKeep::new(),

            debug: false,

            #[cfg(feature="debug")]
            logfile: File::create("debug.log").unwrap(),
        }
    }
    /// Seeks `off` seconds forward or backward from the current position,
    /// flushes both pipelines and re-anchors the playback clock.
    fn seek(&mut self, off: u64, fwd: bool, dmx: &mut Demuxer, disp_queue: &mut DispQueue) {
        let cur_time = self.tkeep.get_cur_time();
        // backward seeks saturate at 0 rather than underflowing
        let seektime = if fwd { cur_time + off * 1000 } else {
            cur_time.saturating_sub(off * 1000) };
        debug_log!(self; {format!(" seek to {}", seektime)});

        let ret = dmx.seek(NATimePoint::Milliseconds(seektime));
        if ret.is_err() {
            // keep playing from the old position if the demuxer cannot seek
            println!(" seek error");
            return;
        }

        // drop everything queued before the seek point
        self.acontrol.flush();
        self.vcontrol.flush();
        disp_queue.flush();

        self.tkeep.reset_ts();
        self.prefill(dmx, disp_queue);
        // re-anchor the clock: prefer the first queued video frame, otherwise
        // wait for the audio side to report its position
        if !disp_queue.is_empty() {
            self.tkeep.reset_all(disp_queue.first_ts);
        } else {
            // busy-wait (with yields) for the audio time, bailing out after a
            // large iteration count so a stalled audio thread cannot hang us
            let mut iterations = 0;
            let mut time = self.acontrol.get_time();
            while time.is_none() {
                iterations += 1;
                std::thread::yield_now();
                if iterations > 1000000 { println!(" still no time set?!"); break; }
                time = self.acontrol.get_time();
            }
            if let Some(time) = time {
                self.tkeep.reset_all(time);
            }
        }
        if !self.paused {
            self.acontrol.resume();
        }
    }
    /// Pre-buffers demuxed packets and decoded frames so both queues have
    /// data before the playback clock (re)starts.
    fn prefill(&mut self, dmx: &mut Demuxer, disp_queue: &mut DispQueue) {
        debug_log!(self; {" prefilling"});
        while self.vcontrol.get_queue_size() < FRAME_QUEUE_LEN {
            // demux another packet only while both sides can accept more
            let mut try_send = self.acontrol.get_queue_size() < FRAME_QUEUE_LEN && (!self.has_video || (!self.vcontrol.is_filled(FRAME_QUEUE_LEN) && !disp_queue.is_full()));

            // back-pressure: if queued packets could not be pushed downstream
            // and the queue is already half full, stop feeding it
            if !self.vcontrol.try_send_queued() && self.vcontrol.get_queue_size() > FRAME_QUEUE_LEN / 2 {
                try_send = false;
            }
            if !self.acontrol.try_send_queued() && self.acontrol.get_queue_size() > FRAME_QUEUE_LEN / 2 {
                try_send = false;
            }
            if try_send {
                match dmx.get_frame() {
                    Err(DemuxerError::EOF) => break,
                    Err(_) => break,
                    Ok(pkt) => {
                        let streamno = pkt.get_stream().get_id();
                        if self.has_video && streamno == self.video_str {
                            self.vcontrol.try_send_video(PktSendEvent::Packet(pkt));
                        } else if self.has_audio && streamno == self.audio_str {
                            self.acontrol.try_send_audio(PktSendEvent::Packet(pkt));
                        }
                        // packets for unselected streams are silently dropped
                    }
                };
            }
            self.vcontrol.fill(disp_queue);

            if !try_send {
                break;
            }
        }
        if self.has_video {
            // drain already-decoded frames into the display queue
            while self.vcontrol.get_queue_size() > 0 && !disp_queue.is_full() {
                self.vcontrol.try_send_queued();
                self.vcontrol.fill(disp_queue);
                std::thread::sleep(Duration::from_millis(10));
            }
            self.vcontrol.fill(disp_queue);
        }
        debug_log!(self; {format!(" prefilling done, frames {}-{} audio {}", disp_queue.start, disp_queue.end, self.acontrol.get_fill())});
    }
    /// Drains pending SDL events: quit, window repaint and keyboard control.
    /// Returns `true` when the caller's playback loop should stop the
    /// current file (quit or skip-to-next).
    fn handle_events(&mut self, event_pump: &mut sdl2::EventPump, canvas: &mut Canvas<Window>, dmx: &mut Demuxer, disp_queue: &mut DispQueue) -> bool {
        for event in event_pump.poll_iter() {
            if let Event::Quit {..} = event {
                self.end = true;
                println!();
                return true;
            }
            // repaint the last shown frame when the window gets re-exposed
            if let Event::Window {win_event: WindowEvent::Exposed, ..} = event {
                canvas.clear();
                canvas.copy(disp_queue.get_last_texture(), None, None).unwrap();
                canvas.present();
            }
            if let Event::KeyDown {keycode: Some(keycode), ..} = event {
                match keycode {
                    // quit the player entirely
                    Keycode::Escape | Keycode::Q => {
                        self.end = true;
                        println!();
                        return true;
                    },
                    // skip to the next file
                    Keycode::Return => return true,
                    // seeking: left/right = 10 s, up/down = 1 min, PgUp/PgDn = 10 min
                    Keycode::Right => { self.seek(10, true, dmx, disp_queue); },
                    Keycode::Left => { self.seek(10, false, dmx, disp_queue); },
                    Keycode::Up => { self.seek(60, true, dmx, disp_queue); },
                    Keycode::Down => { self.seek(60, false, dmx, disp_queue); },
                    Keycode::PageUp => { self.seek(600, true, dmx, disp_queue); },
                    Keycode::PageDown => { self.seek(600, false, dmx, disp_queue); },
                    // pause/resume; the screen saver is allowed back while paused
                    Keycode::Space => {
                        self.paused = !self.paused;
                        if self.paused {
                            self.vsystem.enable_screen_saver();
                            // freeze the position reached so far
                            self.tkeep.set_ts();
                        } else {
                            self.vsystem.disable_screen_saver();
                            // restart elapsed-time measurement from now
                            self.tkeep.set_time();
                        }
                        if self.paused {
                            self.acontrol.pause();
                        } else {
                            self.acontrol.resume();
                        }
                    },
                    // volume up/down in steps of 10, clamped to 0..=MAX_VOLUME;
                    // while muted only the stored value changes
                    Keycode::Plus | Keycode::KpPlus => {
                        self.volume = (self.volume + 10).min(MAX_VOLUME);
                        if !self.mute {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::Minus | Keycode::KpMinus => {
                        self.volume = self.volume.saturating_sub(10);
                        if !self.mute {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    // toggle the verbose status line
                    Keycode::D => {
                        self.debug = !self.debug;
                    },
                    // mute toggle keeps the configured volume for unmuting
                    Keycode::M => {
                        self.mute = !self.mute;
                        if self.mute {
                            self.acontrol.set_volume(0);
                        } else {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::H => {
                        self.vcontrol.try_send_video(PktSendEvent::HurryUp);
                    },
                    _ => {},
                };
                // blank the console status line after any key press while playing
                if !self.paused {
                    print!("{:60}\r", ' ');
                    std::io::stdout().flush().unwrap();
                }
            }
        }
        false
    }
425 | fn play(&mut self, name: &str, start_time: NATimePoint) { | |
426 | debug_log!(self; {format!("Playing {}", name)}); | |
427 | ||
428 | // prepare data source | |
429 | let path = Path::new(name); | |
430 | let mut file = File::open(path).unwrap(); | |
431 | let dmx_fact; | |
432 | let mut fr = FileReader::new_read(&mut file); | |
433 | let mut br = ByteReader::new(&mut fr); | |
434 | let res = detect::detect_format(name, &mut br); | |
435 | if res.is_none() { | |
436 | println!("cannot detect format for {}", name); | |
437 | return; | |
438 | } | |
439 | let (dmx_name, _score) = res.unwrap(); | |
440 | debug_log!(self; {format!(" found demuxer {} with score {:?}", dmx_name, _score)}); | |
441 | println!("trying demuxer {} on {}", dmx_name, name); | |
442 | ||
443 | let mut dmx_reg = RegisteredDemuxers::new(); | |
444 | nihav_register_all_demuxers(&mut dmx_reg); | |
445 | let mut dec_reg = RegisteredDecoders::new(); | |
446 | nihav_register_all_decoders(&mut dec_reg); | |
447 | ||
448 | let ret = dmx_reg.find_demuxer(dmx_name); | |
449 | if ret.is_none() { | |
450 | println!("error finding {} demuxer", dmx_name); | |
451 | return; | |
452 | } | |
453 | dmx_fact = ret.unwrap(); | |
454 | br.seek(SeekFrom::Start(0)).unwrap(); | |
455 | let ret = create_demuxer(dmx_fact, &mut br); | |
456 | if ret.is_err() { | |
457 | println!("error creating demuxer"); | |
458 | return; | |
459 | } | |
460 | let mut dmx = ret.unwrap(); | |
461 | if start_time != NATimePoint::None { | |
462 | debug_log!(self; {format!(" start seek to {}", start_time)}); | |
463 | if dmx.seek(start_time).is_err() { | |
464 | println!("initial seek failed"); | |
465 | } | |
466 | } | |
467 | ||
468 | let mut width = 640; | |
469 | let mut height = 480; | |
470 | let mut tb_num = 0; | |
471 | let mut tb_den = 0; | |
472 | let mut ainfo: Option<NAAudioInfo> = None; | |
473 | ||
474 | let mut video_dec: Option<DecoderStuff> = None; | |
475 | let mut audio_dec: Option<DecoderStuff> = None; | |
476 | ||
477 | let duration = dmx.get_duration(); | |
478 | if duration != 0 { | |
479 | println!(" total duration {}", format_time(duration)); | |
480 | } | |
481 | self.has_video = false; | |
482 | self.has_audio = false; | |
483 | for i in 0..dmx.get_num_streams() { | |
484 | let s = dmx.get_stream(i).unwrap(); | |
485 | let info = s.get_info(); | |
486 | let decfunc = dec_reg.find_decoder(info.get_name()); | |
487 | println!("stream {} - {} {}", i, s, info.get_name()); | |
488 | debug_log!(self; {format!(" stream {} - {} {}", i, s, info.get_name())}); | |
489 | let str_id = s.get_id(); | |
490 | if info.is_video() { | |
491 | if video_dec.is_none() && self.play_video { | |
492 | if let Some(decfunc) = decfunc { | |
493 | let mut dec = (decfunc)(); | |
494 | let mut dsupp = Box::new(NADecoderSupport::new()); | |
495 | let props = info.get_properties().get_video_info().unwrap(); | |
496 | if props.get_width() != 0 { | |
497 | width = props.get_width(); | |
498 | height = props.get_height(); | |
499 | } | |
500 | let desc = get_codec_description(info.get_name()); | |
501 | let (reorder_depth, reord) = if desc.is_none() || (desc.unwrap().caps & CODEC_CAP_COMPLEX_REORDER) == 0 { | |
502 | let reord: Box<dyn FrameReorderer + Send> = Box::new(IPBReorderer::new()); | |
503 | (3, reord) | |
504 | } else { | |
505 | let reord: Box<dyn FrameReorderer + Send> = Box::new(ComplexReorderer::new()); | |
506 | (16, reord) | |
507 | }; | |
508 | dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth); | |
509 | dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth); | |
510 | dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth); | |
511 | dec.init(&mut dsupp, info).unwrap(); | |
512 | video_dec = Some(DecoderStuff{ dsupp, dec, reord }); | |
513 | self.video_str = str_id; | |
514 | let (tbn, tbd) = s.get_timebase(); | |
515 | tb_num = tbn; | |
516 | tb_den = tbd; | |
517 | self.has_video = true; | |
518 | } else { | |
519 | println!("no video decoder for {} found!", info.get_name()); | |
520 | } | |
521 | } | |
522 | } else if info.is_audio() { | |
523 | if audio_dec.is_none() && self.play_audio { | |
524 | if let Some(decfunc) = decfunc { | |
525 | let mut dec = (decfunc)(); | |
526 | let mut dsupp = Box::new(NADecoderSupport::new()); | |
527 | ainfo = info.get_properties().get_audio_info(); | |
528 | dec.init(&mut dsupp, info).unwrap(); | |
529 | let reord = Box::new(NoReorderer::new()); | |
530 | audio_dec = Some(DecoderStuff{ dsupp, dec, reord }); | |
531 | self.audio_str = str_id; | |
532 | self.has_audio = true; | |
533 | } else { | |
534 | println!("no audio decoder for {} found!", info.get_name()); | |
535 | } | |
536 | } | |
537 | } else { | |
538 | println!("decoder {} not found", info.get_name()); | |
539 | } | |
540 | } | |
541 | if !self.has_video && !self.has_audio { | |
542 | println!("No playable streams found."); | |
543 | return; | |
544 | } | |
545 | ||
546 | while (width <= 384) && (height <= 288) { | |
547 | width <<= 1; | |
548 | height <<= 1; | |
549 | } | |
550 | ||
551 | // prepare playback structure | |
552 | let mut new_vcontrol = VideoControl::new(video_dec, width, height, tb_num, tb_den); | |
553 | std::mem::swap(&mut self.vcontrol, &mut new_vcontrol); | |
554 | ||
555 | let mut new_acontrol = AudioControl::new(audio_dec, ainfo, &self.asystem); | |
556 | std::mem::swap(&mut self.acontrol, &mut new_acontrol); | |
557 | ||
558 | if self.mute { | |
559 | self.acontrol.set_volume(0); | |
560 | } else { | |
561 | self.acontrol.set_volume(self.volume); | |
562 | } | |
563 | ||
564 | let fname = path.file_name(); | |
565 | let wname = if let Some(fname) = fname { | |
566 | "NihAV player - ".to_owned() + fname.to_str().unwrap() | |
567 | } else { | |
568 | "NihAV player".to_owned() | |
569 | }; | |
570 | let window = self.vsystem.window(&wname, width as u32, height as u32) | |
571 | .position_centered().build().unwrap(); | |
572 | let mut canvas = window.into_canvas().build().unwrap(); | |
573 | let texture_creator = canvas.texture_creator(); | |
574 | let mut disp_q = DispQueue::new(&texture_creator, width, height, if self.has_video { FRAME_QUEUE_LEN } else { 0 }); | |
575 | if !self.has_video { | |
576 | canvas.clear(); | |
577 | canvas.copy(disp_q.get_last_texture(), None, None).unwrap(); | |
578 | canvas.present(); | |
579 | } | |
580 | ||
581 | self.has_audio = self.acontrol.has_audio(); | |
582 | if !self.has_video && !self.has_audio { | |
583 | println!("No playable streams."); | |
584 | return; | |
585 | } | |
586 | ||
587 | // play | |
588 | self.prefill(&mut dmx, &mut disp_q); | |
589 | self.tkeep.reset_all(0); | |
590 | if !self.paused { | |
591 | self.acontrol.resume(); | |
592 | } | |
593 | let mut event_pump = self.sdl_context.event_pump().unwrap(); | |
594 | let mut last_disp = Instant::now(); | |
595 | let mut has_data = true; | |
596 | 'main: loop { | |
597 | if self.handle_events(&mut event_pump, &mut canvas, &mut dmx, &mut disp_q) { | |
598 | println!(); | |
599 | break 'main; | |
600 | } | |
601 | if !self.paused { | |
602 | let mut try_send = self.acontrol.get_queue_size() < FRAME_QUEUE_LEN && self.vcontrol.get_queue_size() < FRAME_QUEUE_LEN; | |
603 | if !self.vcontrol.try_send_queued() && self.vcontrol.is_filled(FRAME_QUEUE_LEN) { | |
604 | try_send = false; | |
605 | } | |
606 | if !self.acontrol.try_send_queued() { | |
607 | try_send = false; | |
608 | } | |
609 | while has_data && try_send { | |
610 | match dmx.get_frame() { | |
611 | Err(DemuxerError::EOF) => { | |
612 | self.vcontrol.try_send_video(PktSendEvent::End); | |
613 | self.acontrol.try_send_audio(PktSendEvent::End); | |
614 | has_data = false; | |
615 | }, | |
616 | Err(err) => { println!("demuxer error {:?}", err); }, | |
617 | Ok(pkt) => { | |
618 | let streamno = pkt.get_stream().get_id(); | |
619 | if self.has_video && streamno == self.video_str { | |
620 | debug_log!(self; {" sending video packet"}); | |
621 | self.vcontrol.try_send_video(PktSendEvent::Packet(pkt)); | |
622 | if self.vcontrol.is_filled(FRAME_QUEUE_LEN) { | |
623 | try_send = false; | |
624 | } | |
625 | } else if self.has_audio && streamno == self.audio_str { | |
626 | debug_log!(self; {" sending audio packet"}); | |
627 | self.acontrol.try_send_audio(PktSendEvent::Packet(pkt)); | |
628 | if self.acontrol.get_queue_size() >= FRAME_QUEUE_LEN { | |
629 | try_send = false; | |
630 | } | |
631 | } | |
632 | } | |
633 | }; | |
634 | } | |
635 | self.vcontrol.fill(&mut disp_q); | |
636 | let mut sleep_time = 25; | |
637 | debug_log!(self; {format!(" time {}", self.tkeep.get_cur_time())}); | |
638 | if self.has_video { | |
639 | debug_log!(self; {format!(" disp queue {}-{}, {}-{} vqueue fill {}", disp_q.first_ts, disp_q.last_ts, disp_q.start, disp_q.end, self.vcontrol.get_queue_size())}); | |
640 | let ret = try_display(&mut disp_q, &mut canvas, &self.tkeep); | |
641 | if let Some(next_time) = ret { | |
642 | sleep_time = sleep_time.min(next_time); | |
643 | } | |
644 | } | |
645 | if self.has_audio { | |
646 | let time_left = self.acontrol.get_time_left(); | |
647 | debug_log!(self; {format!(" audio left {}", time_left)}); | |
648 | sleep_time = sleep_time.min(time_left); | |
649 | } | |
650 | debug_log!(self; {format!(" sleep {}ms", sleep_time)}); | |
651 | if last_disp.elapsed().as_millis() >= 10 { | |
652 | let c_time = self.tkeep.get_cur_time(); | |
653 | ||
654 | if !self.debug { | |
655 | print!(" {} {}% \r", format_time(c_time), self.acontrol.get_volume()); | |
656 | } else { | |
657 | print!(" {} {} {}% {:3} {:6}\r", format_time(c_time), if self.vcontrol.is_yuv() { 'Y' } else { 'R' }, self.acontrol.get_volume(), (disp_q.end + disp_q.len - disp_q.start) % disp_q.len, self.acontrol.get_fill()); | |
658 | } | |
659 | std::io::stdout().flush().unwrap(); | |
660 | last_disp = Instant::now(); | |
661 | } | |
662 | let mut end = true; | |
663 | if self.has_video && !self.vcontrol.is_video_end() { | |
664 | end = false; | |
665 | } | |
666 | if self.has_audio && !self.acontrol.is_audio_end() { | |
667 | end = false; | |
668 | } | |
669 | if end { | |
670 | break; | |
671 | } | |
672 | thread::sleep(Duration::from_millis(sleep_time)); | |
673 | } else { | |
674 | thread::sleep(Duration::from_millis(20)); | |
675 | } | |
676 | } | |
677 | println!(); | |
678 | std::mem::swap(&mut self.vcontrol, &mut new_vcontrol); | |
679 | new_vcontrol.finish(); | |
680 | std::mem::swap(&mut self.acontrol, &mut new_acontrol); | |
681 | new_acontrol.finish(); | |
682 | } | |
683 | } | |
684 | ||
685 | fn main() { | |
686 | let args: Vec<String> = env::args().collect(); | |
687 | ||
688 | if args.len() == 1 { | |
689 | println!("usage: nihav-player file1 file2 ..."); | |
690 | return; | |
691 | } | |
692 | ||
693 | let mut player = Player::new(); | |
694 | ||
695 | let mut aiter = args.iter().skip(1); | |
696 | let mut seek_time = NATimePoint::None; | |
697 | while let Some(arg) = aiter.next() { | |
698 | match arg.as_str() { | |
699 | "-an" => { player.play_audio = false; }, | |
700 | "-ae" => { player.play_audio = true; }, | |
701 | "-vn" => { player.play_video = false; }, | |
702 | "-ve" => { player.play_video = true; }, | |
703 | "-seek" => { | |
704 | if let Some(arg) = aiter.next() { | |
705 | if let Ok(time) = arg.parse::<NATimePoint>() { | |
706 | seek_time = time; | |
707 | } else { | |
708 | println!("wrong seek time"); | |
709 | seek_time = NATimePoint::None; | |
710 | } | |
711 | } | |
712 | }, | |
713 | "-vol" => { | |
714 | if let Some(arg) = aiter.next() { | |
715 | if let Ok(vol) = arg.parse::<usize>() { | |
716 | player.volume = vol.min(MAX_VOLUME); | |
717 | } else { | |
718 | println!("wrong volume"); | |
719 | } | |
720 | } | |
721 | }, | |
722 | "-debug" => { | |
723 | player.debug = true; | |
724 | }, | |
725 | "-nodebug" => { | |
726 | player.debug = false; | |
727 | }, | |
728 | _ => { | |
729 | player.play(arg, seek_time); | |
730 | if player.end { break; } | |
731 | seek_time = NATimePoint::None; | |
732 | }, | |
733 | }; | |
734 | } | |
735 | } |