// Source: nihav-player.git, src/main.rs (commit 31642ad0c7e44d7a8550ac9c77e6453987fa723d)
1 extern crate sdl;
2 extern crate nihav_core;
3 extern crate nihav_allstuff;
4
5 use sdl::video::*;
6 use sdl::audio::{DesiredAudioSpec, Channels, AudioFormat, AudioCallback};
7 use sdl::event::{Event, Key};
8
9 use std::env;
10 use std::fs::File;
11 use std::path::Path;
12 use std::time::{Duration, SystemTime};
13 use std::thread;
14 use std::sync::mpsc;
15 use std::sync::{Arc, Mutex};
16 use std::sync::atomic::{AtomicBool, Ordering};
17 use std::str::FromStr;
18
19 use nihav_core::detect;
20 use nihav_core::formats::*;
21 use nihav_core::frame::*;
22 use nihav_core::io::byteio::{FileReader, ByteReader};
23 use nihav_core::reorder::*;
24 use nihav_core::codecs::*;
25 use nihav_core::demuxers::*;
26 use nihav_core::scale::*;
27 use nihav_core::soundcvt::*;
28 use nihav_allstuff::*;
29
/// Number of sample frames requested per SDL audio callback invocation.
const AUDIO_BUF_SIZE: usize = 1024;
31
/// Byte FIFO feeding the SDL audio callback from the audio decoder thread.
///
/// `full` throttles the producer once more than `max_len` bytes would be
/// buffered; `done` switches the consumer to emitting silence so the audio
/// callback never blocks after playback has finished.
struct AudioFIFO {
    data: Vec<u8>,  // backing storage; grown on demand (bounded by max_len)
    max_len: usize, // soft cap: adding past this sets `full`
    pos: usize,     // read offset into `data`
    end: usize,     // write offset into `data`
    done: bool,     // playback over: consume() outputs silence
    full: bool,     // producer must wait until the buffer half-drains
}

impl AudioFIFO {
    /// Creates a FIFO with `len` bytes preallocated and a soft cap of `max_len`.
    fn new(len: usize, max_len: usize) -> Self {
        Self { data: vec![0; len], max_len, pos: 0, end: 0, done: false, full: false }
    }
    /// Appends `src` to the FIFO.
    ///
    /// Returns `true` when the caller should stop retrying: either the data
    /// was accepted, or the FIFO is done/full and the data is intentionally
    /// dropped. Returns `false` when the FIFO just became full and the caller
    /// should retry after the consumer has drained it.
    fn add(&mut self, src: &[u8]) -> bool {
        if self.done || self.full { return true; }
        // Reclaim consumed space by moving the unread tail to the front
        // (memmove via copy_within instead of a manual byte loop).
        if self.pos > 0 {
            self.data.copy_within(self.pos..self.end, 0);
            self.end -= self.pos;
            self.pos = 0;
        }
        if self.end + src.len() > self.max_len {
            self.full = true;
            return false;
        }
        if self.end + src.len() > self.data.len() {
            self.data.resize(self.end + src.len(), 0);
        }
        self.data[self.end..][..src.len()].copy_from_slice(src);
        self.end += src.len();
        true
    }
    /// Fills `dst` entirely from the FIFO.
    ///
    /// Returns `false` if not enough data is buffered yet; after `finish()`
    /// it always succeeds, writing silence.
    fn consume(&mut self, dst: &mut [u8]) -> bool {
        if self.done { for el in dst.iter_mut() { *el = 0; } return true; }
        let size = dst.len();
        if self.end - self.pos < size { return false; }
        dst.copy_from_slice(&self.data[self.pos..][..size]);
        self.pos += size;
        // Let the producer resume once at least half the cap has been read.
        if self.pos >= self.max_len / 2 { self.full = false; }
        true
    }
    /// Marks playback as finished; subsequent consume() calls emit silence.
    fn finish(&mut self) {
        self.done = true;
    }
}
76
77 struct AudioConsumer {
78 afifo: Arc<Mutex<AudioFIFO>>,
79 }
80
81 impl AudioConsumer {
82 fn new(afifo: Arc<Mutex<AudioFIFO>>) -> Self { Self { afifo } }
83 }
84
85 impl AudioCallback for AudioConsumer {
86 fn callback(&mut self, out: &mut [u8]) {
87 let mut done = false;
88 while !done {
89 let ret = self.afifo.lock();
90 if let Ok(mut afifo) = ret {
91 done = afifo.consume(out);
92 }
93 if !done { thread::sleep(Duration::from_millis(400)); }
94 }
95 }
96 }
97
98 impl Drop for AudioConsumer {
99 fn drop(&mut self) {
100 let ret = self.afifo.lock();
101 if let Ok(mut afifo) = ret {
102 afifo.finish();
103 }
104 }
105 }
106
/// Everything one decoder thread needs to run a single stream's codec.
struct DecoderStuff {
    dsupp: Box<NADecoderSupport>,          // decoder support data (frame pools etc.)
    dec: Box<dyn NADecoder + Send>,        // the codec implementation
    reord: Box<dyn FrameReorderer + Send>, // puts frames back into display order
}
112
/// Message from the demuxing loop to a decoder thread.
enum SendEvent {
    /// A demuxed packet belonging to that thread's stream.
    Packet(NAPacket),
    /// End of input: the decoder thread should terminate.
    EOF,
}
117
/// Decoded output sent from a decoder thread to the main display loop.
enum DisplayEvent {
    /// RGB video frame: display time in milliseconds plus an SDL surface.
    RGB(u64, NABufferRef<Surface>),
    /// YUV video frame: display time in milliseconds plus an SDL overlay.
    YUV(u64, NABufferRef<Overlay>),
    /// Converted audio samples ready to be queued for playback.
    Audio(NABufferType),
}
123
124 impl DisplayEvent {
125 fn get_time(&self) -> u64 {
126 match *self {
127 DisplayEvent::RGB(time, _) => time,
128 DisplayEvent::YUV(time, _) => time,
129 _ => 0,
130 }
131 }
132 }
133
/// Channel endpoints and shared state handed to each decoder thread.
struct CommonMessages {
    receiver: mpsc::Receiver<SendEvent>,   // packets from the demuxing loop
    esend: mpsc::SyncSender<DisplayEvent>, // decoded output to the display loop
    ev_mtx: Arc<Mutex<isize>>,             // count of display events in flight
    finished: Arc<AtomicBool>,             // set when this decoder thread exits
}
140
141 fn add_audio<T:Copy>(amtx: &mut Arc<Mutex<AudioFIFO>>, data: &[T]) {
142 let len = std::mem::size_of::<T>() * data.len();
143 let mut done = false;
144 while !done {
145 let ret = amtx.try_lock();
146 if let Ok(mut afifo) = ret {
147 unsafe {
148 let u8_ptr = data.as_ptr();
149 let u8_data = std::mem::transmute((u8_ptr, len));
150 done = afifo.add(u8_data);
151 }
152 }
153 if !done { std::thread::sleep(Duration::from_millis(100)); }
154 }
155 }
156
/// Opens the SDL audio device (requesting 16-bit stereo at `arate` Hz) and
/// returns the audio format decoder output must be converted into before it
/// is queued for playback.
fn open_audio(arate: u32, amtx: Arc<Mutex<AudioFIFO>>) -> NAAudioInfo {
    let dspec = DesiredAudioSpec {
        freq: arate as i32, format: AudioFormat::S16Lsb, channels: Channels::Stereo, samples: AUDIO_BUF_SIZE as u16,
        callback: Box::new(AudioConsumer::new(amtx))
    };
    // SDL may hand back a spec that differs from the requested one.
    let tspec = sdl::audio::open(dspec).unwrap();
    //println!("target spec: {} Hz fmt {:X} {} ch {} samp {} size", tspec.freq, tspec.format as u32, if tspec.channels == Channels::Stereo { 2 } else { 1 }, tspec.samples, tspec.size);
    // Start paused; playback is resumed once decoded audio is queued.
    sdl::audio::pause(true);
    let dst_ch = if tspec.channels == Channels::Stereo { 2 } else { 1 };
    // Map the SDL sample format obtained onto a NihAV sample description.
    // Formats not produced in practice are left unimplemented.
    let snd_fmt = match tspec.format {
        AudioFormat::U8 => SND_U8_FORMAT,
        AudioFormat::S8 => unimplemented!(),
        AudioFormat::U16Lsb => unimplemented!(),
        AudioFormat::S16Lsb => SND_S16_FORMAT,
        AudioFormat::U16Msb => unimplemented!(),
        AudioFormat::S16Msb => NASoniton { bits: 16, be: true, packed: false, planar: false, float: false, signed: true },
    };
    NAAudioInfo { sample_rate: tspec.freq as u32, channels: dst_ch, format: snd_fmt, block_len: 1024 }
}
176
177 fn start_audio_dec(audio_dec: Option<DecoderStuff>, dst_info: NAAudioInfo, cmsg: CommonMessages) -> thread::JoinHandle<()> {
178 thread::spawn(move || {
179 let mut audio_dec = audio_dec.unwrap();
180 let dst_chmap = if dst_info.channels == 2 {
181 NAChannelMap::from_str("L,R").unwrap()
182 } else {
183 NAChannelMap::from_str("C").unwrap()
184 };
185 loop {
186 let ret = cmsg.receiver.recv();
187 if ret.is_err() { break; }
188 if let Ok(SendEvent::EOF) = ret { break; }
189 let pkt = if let Ok(SendEvent::Packet(pkt)) = ret { pkt } else { unreachable!(); };
190 let ret = audio_dec.dec.decode(&mut audio_dec.dsupp, &pkt);
191 if let Ok(frm) = ret {
192 let buf = frm.get_buffer();
193 let out_buf = convert_audio_frame(&buf, &dst_info, &dst_chmap).unwrap();
194 cmsg.esend.send(DisplayEvent::Audio(out_buf)).unwrap();
195 let mut count = cmsg.ev_mtx.lock().unwrap();
196 *count += 1;
197 drop(count);
198 thread::yield_now();
199 } else {
200 println!("error decoding audio");
201 }
202 }
203 cmsg.finished.store(true, Ordering::Relaxed);
204 })
205 }
206
/// Spawns the video decoding thread: receives packets, decodes and reorders
/// frames, scales/converts them to the display size, and forwards each frame
/// to the display loop either as an RGB SDL surface or as a YUV overlay.
/// Sets `cmsg.finished` before exiting.
fn start_video_dec(video_dec: Option<DecoderStuff>, ifmt: Option<NAVideoInfo>, width: usize, height: usize, tb_num: u32, tb_den: u32, cmsg: CommonMessages, scr_mtx: Arc<Mutex<NABufferRef<Surface>>>) -> thread::JoinHandle<()> {
    thread::spawn(move || {
        let mut ifmt = ifmt.unwrap();
        let mut video_dec = video_dec.unwrap();
        // Packed 4-bytes-per-pixel RGB format matching the SDL surface created below.
        let rgb32_fmt = NAPixelFormaton { model: ColorModel::RGB(RGBSubmodel::RGB), components: 3,
            comp_info: [
                Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 0, next_elem: 4 }),
                Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 1, next_elem: 4 }),
                Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 2, next_elem: 4 }),
                None, None
            ], elem_size: 4, be: false, alpha: false, palette: false };
        let ofmt_rgb = ScaleInfo { width, height, fmt: rgb32_fmt };
        let ofmt_yuv = ScaleInfo { width, height, fmt: YUV420_FORMAT };

        // YUV sources go through the overlay path; everything else is scaled to RGB32.
        let sc_ifmt = ScaleInfo { width: ifmt.get_width(), height: ifmt.get_height(), fmt: ifmt.get_format() };
        let mut do_yuv = if let ColorModel::YUV(_) = ifmt.get_format().get_model() { true } else { false };
        let ofmt = if do_yuv { ofmt_yuv } else { ofmt_rgb };
        let mut opic = alloc_video_buffer(NAVideoInfo::new(width, height, false, ofmt.fmt), 4).unwrap();
        let mut scaler = NAScale::new(sc_ifmt, ofmt).unwrap();
        loop {
            // Stop on channel shutdown or an explicit end-of-stream marker.
            let ret = cmsg.receiver.recv();
            if ret.is_err() { break; }
            if let Ok(SendEvent::EOF) = ret { break; }
            let pkt = if let Ok(SendEvent::Packet(pkt)) = ret { pkt } else { unreachable!() };
            let ret = video_dec.dec.decode(&mut video_dec.dsupp, &pkt);
            if let Ok(frm) = ret {
                // Frames may come out of the decoder in coding order; the
                // reorderer hands them back in display order (possibly none yet).
                video_dec.reord.add_frame(frm);
                while let Some(frm) = video_dec.reord.get_frame() {
                    let bt = frm.get_buffer();
                    if let NABufferType::None = bt { continue; }
                    let vinfo = bt.get_video_info().unwrap();
                    // Mid-stream format/size change: rebuild scaler and output buffer.
                    if ifmt.get_width() != vinfo.get_width() ||
                        ifmt.get_height() != vinfo.get_height() ||
                        ifmt.get_format() != vinfo.get_format() {
                        println!("reinit scaler!");
                        ifmt = vinfo.clone();
                        let sc_ifmt = ScaleInfo { width: ifmt.get_width(), height: ifmt.get_height(), fmt: ifmt.get_format() };
                        do_yuv = if let ColorModel::YUV(_) = ifmt.get_format().get_model() { true } else { false };
                        let ofmt = if do_yuv { ofmt_yuv } else { ofmt_rgb };
                        opic = alloc_video_buffer(NAVideoInfo::new(width, height, false, ofmt.fmt), 4).unwrap();
                        scaler = NAScale::new(sc_ifmt, ofmt).unwrap();
                    }
                    let ret = scaler.convert(&bt, &mut opic);
                    if ret.is_err() { println!(" scaler error {:?}", ret.err()); continue; }
                    ret.unwrap();
                    // Convert the stream timestamp to milliseconds for display pacing.
                    let ts = frm.get_dts().unwrap_or(frm.get_pts().unwrap_or(0));
                    let time = NATimeInfo::ts_to_time(ts, 1000, tb_num, tb_den);

                    let buf = opic.get_vbuf().unwrap();
                    if !do_yuv {
                        // RGB path: copy row by row into a software SDL surface,
                        // honouring the (possibly different) surface pitch.
                        let sstride = buf.get_stride(0);
                        let src = buf.get_data();
                        let surface = Surface::new(&[SurfaceFlag::SWSurface], width as isize, height as isize, 32, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000).unwrap();
                        let pitch = unsafe { (*surface.raw).pitch } as usize;
                        surface.with_lock(|x: &mut [u8]| -> bool {
                            let csize = sstride.min(pitch);
                            for (dst, src) in x.chunks_mut(pitch).zip(src.chunks(sstride)) {
                                (&mut dst[..csize]).copy_from_slice(&src[..csize]);
                            }
                            true
                        });
                        // Bump the pending-event count while holding the lock so the
                        // main loop sees a consistent count/queue pair.
                        let mut count = cmsg.ev_mtx.lock().unwrap();
                        cmsg.esend.send(DisplayEvent::RGB(time, NABufferRef::new(surface))).unwrap();
                        *count += 1;
                    } else {
                        // YUV path: copy planes into a YV12 overlay created from the screen.
                        let screen = scr_mtx.lock().unwrap();
                        let overlay = screen.create_overlay(width as isize, height as isize, OverlayFormat::YV12).unwrap();
                        drop(screen);
                        while !overlay.lock() {}
                        let src = buf.get_data();
                        let ysstride = buf.get_stride(0);
                        let ysrc = &src[buf.get_offset(0)..];
                        // NOTE(review): the crossed plane indices (source plane 2 feeding
                        // overlay plane 1 and vice versa) appear to account for YV12's
                        // V-before-U plane order — confirm against the buffer layout.
                        let usstride = buf.get_stride(2);
                        let usrc = &src[buf.get_offset(2)..];
                        let vsstride = buf.get_stride(1);
                        let vsrc = &src[buf.get_offset(1)..];
                        unsafe {
                            let ydst = overlay.get_pixel_ptr(0);
                            let ydstride = overlay.get_pitch(0);
                            let udst = overlay.get_pixel_ptr(1);
                            let udstride = overlay.get_pitch(1);
                            let vdst = overlay.get_pixel_ptr(2);
                            let vdstride = overlay.get_pitch(2);
                            // Luma plane is full size; chroma planes are half width.
                            for (ydst, ysrc) in ydst.chunks_mut(ydstride).take(height).zip(ysrc.chunks(ysstride)) {
                                (&mut ydst[..width]).copy_from_slice(&ysrc[..width]);
                            }
                            for (udst, usrc) in udst.chunks_mut(udstride).take(height).zip(usrc.chunks(usstride)) {
                                (&mut udst[..width / 2]).copy_from_slice(&usrc[..width / 2]);
                            }
                            for (vdst, vsrc) in vdst.chunks_mut(vdstride).take(height).zip(vsrc.chunks(vsstride)) {
                                (&mut vdst[..width / 2]).copy_from_slice(&vsrc[..width / 2]);
                            }
                        }
                        overlay.unlock();
                        let mut count = cmsg.ev_mtx.lock().unwrap();
                        cmsg.esend.send(DisplayEvent::YUV(time, NABufferRef::new(overlay))).unwrap();
                        *count += 1;
                    }
                }
            } else {
                println!("error decoding video");
            }
        }
        cmsg.finished.store(true, Ordering::Relaxed);
    })
}
313
/// Plays the file named on the command line: detects the container, sets up
/// decoders and worker threads, then runs the SDL event/demux/display loop
/// until end of stream or a quit request.
///
/// Recognised options before the file name: `-an` disables audio decoding,
/// `--` ends option parsing.
fn play_file(args: Vec<String>) {

    // ---- command-line handling -------------------------------------------
    let mut cur_arg: usize = 1;
    let mut decode_audio = true;
    while (cur_arg < args.len()) && args[cur_arg].starts_with('-') {
        match args[cur_arg].as_str() {
            "--" => { break; },
            "-an" => { decode_audio = false; },
            _ => { println!("unknown option {}", args[cur_arg]); return; },
        }
        cur_arg += 1;
    }
    let name = args[cur_arg].as_str();

    // ---- open input and detect the container format ----------------------
    let path = Path::new(name);
    let mut file = File::open(path).unwrap();
    let dmx_fact;
    let mut fr = FileReader::new_read(&mut file);
    let mut br = ByteReader::new(&mut fr);
    let res = detect::detect_format(name, &mut br);
    if res.is_none() {
        println!("cannot detect format for {}", name);
        return;
    }
    let (dmx_name, _) = res.unwrap();
    println!("trying demuxer {} on {}", dmx_name, name);

    let mut dmx_reg = RegisteredDemuxers::new();
    nihav_register_all_demuxers(&mut dmx_reg);
    let mut dec_reg = RegisteredDecoders::new();
    nihav_register_all_codecs(&mut dec_reg);

    // Detection consumed part of the input; rewind before demuxing.
    dmx_fact = dmx_reg.find_demuxer(dmx_name).unwrap();
    br.seek(SeekFrom::Start(0)).unwrap();
    let mut dmx = create_demuxer(dmx_fact, &mut br).unwrap();

    // ---- pick the first usable video and audio stream --------------------
    let mut width = 640;
    let mut height = 480;
    let mut ifmt = None;
    let mut tb_num = 0;
    let mut tb_den = 0;
    let mut arate = 0;
    let mut video_str = 0;
    let mut audio_str = 0;

    let mut video_dec: Option<DecoderStuff> = None;
    let mut audio_dec: Option<DecoderStuff> = None;

    for i in 0..dmx.get_num_streams() {
        let s = dmx.get_stream(i).unwrap();
        let info = s.get_info();
        let decfunc = dec_reg.find_decoder(info.get_name());
        println!("stream {} - {} {}", i, s, info.get_name());
        let str_id = s.get_id();
        if info.is_video() {
            if video_dec.is_none() {
                // Missing video decoder is fatal; audio-only playback is not
                // attempted here.
                if decfunc.is_none() {
                    println!("no video decoder for {} found!", info.get_name());
                    return;
                }
                let mut dec = (decfunc.unwrap())();
                let mut dsupp = Box::new(NADecoderSupport::new());
                let props = info.get_properties().get_video_info().unwrap();
                if props.get_width() != 0 {
                    width = props.get_width();
                    height = props.get_height();
                    ifmt = Some(props.clone());
                }
                // Pools sized for B-frame reordering depth.
                let reorder_depth = 3;
                dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth);
                dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
                dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
                dec.init(&mut dsupp, info).unwrap();
                let reord = Box::new(IPBReorderer::new());
                video_dec = Some(DecoderStuff{ dsupp, dec, reord });
                video_str = str_id;
                let (tbn, tbd) = s.get_timebase();
                tb_num = tbn;
                tb_den = tbd;
            }
        } else if info.is_audio() {
            if audio_dec.is_none() && decode_audio {
                // Missing audio decoder is non-fatal: play video silently.
                if decfunc.is_none() {
                    println!("no audio decoder for {} found!", info.get_name());
                } else {
                    let mut dec = (decfunc.unwrap())();
                    let mut dsupp = Box::new(NADecoderSupport::new());
                    let props = info.get_properties().get_audio_info().unwrap();
                    arate = props.get_sample_rate();
                    dec.init(&mut dsupp, info).unwrap();
                    let reord = Box::new(NoReorderer::new());
                    audio_dec = Some(DecoderStuff{ dsupp, dec, reord });
                    audio_str = str_id;
                }
            }
        } else {
            println!("decoder {} not found", info.get_name());
        }
    }

    // Upscale small videos by powers of two until reasonably sized.
    while (width <= 384) && (height <= 288) {
        width <<= 1;
        height <<= 1;
    }

    // ---- SDL window and audio device setup -------------------------------
    sdl::init(&[sdl::InitFlag::Video, sdl::InitFlag::Audio]);
    sdl::wm::set_caption("NihAV Player", "nihav-player");
    let screen = match sdl::video::set_video_mode(width as isize, height as isize, 32,
                                                  &[SurfaceFlag::HWSurface, SurfaceFlag::AsyncBlit, SurfaceFlag::HWAccel],
                                                  &[VideoFlag::DoubleBuf]) {
        Ok(screen) => screen,
        Err(err) => panic!("failed to set video mode: {}", err)
    };

    // Rendezvous channels (capacity 0) to the decoder threads and a bounded
    // channel carrying decoded output back to this loop.
    let (vsend, vrecv) = mpsc::sync_channel::<SendEvent>(0);
    let (asend, arecv) = mpsc::sync_channel::<SendEvent>(0);
    let (esend, erecv) = mpsc::sync_channel::<DisplayEvent>(50);
    let events_mtx = Arc::new(Mutex::new(0isize));

    // ---- spawn decoder threads -------------------------------------------
    let has_audio = audio_dec.is_some();
    let mut amtx = Arc::new(Mutex::new(AudioFIFO::new((arate * 8) as usize, (arate * 80) as usize)));
    let aud_finished = Arc::new(AtomicBool::new(!has_audio));
    let audio_thread = if has_audio {
        let ainfo = open_audio(arate, amtx.clone());
        let cmsg = CommonMessages {
            receiver: arecv,
            esend: esend.clone(),
            ev_mtx: events_mtx.clone(),
            finished: aud_finished.clone(),
        };
        Some(start_audio_dec(audio_dec, ainfo, cmsg))
    } else { None };

    let has_video = video_dec.is_some();
    let video_thread: Option<thread::JoinHandle<()>>;
    let scr_mtx = Arc::new(Mutex::new(NABufferRef::new(screen)));
    let vid_finished = Arc::new(AtomicBool::new(!has_video));
    if has_video {
        let cmsg = CommonMessages {
            receiver: vrecv,
            esend: esend,
            ev_mtx: events_mtx.clone(),
            finished: vid_finished.clone(),
        };
        video_thread = Some(start_video_dec(video_dec, ifmt, width, height, tb_num, tb_den, cmsg, scr_mtx.clone()));
    } else {
        video_thread = None;
    };

    // Frames waiting for their display time, in arrival order.
    let mut frame_queue: Vec<DisplayEvent> = Vec::new();

    // Playback clock: frame timestamps are measured against this start time.
    let systime = SystemTime::now();
    let mut has_data = true;

    // ---- main loop: SDL events, demuxing, display pacing -----------------
    'main : loop {
        'event : loop {
            match sdl::event::poll_event() {
                Event::Quit => break 'main,
                Event::None => break 'event,
                Event::Key(k, _, _, _)
                    if k == Key::Escape || k == Key::Q
                        => break 'main,
                Event::Key(k, _, _, _)
                    if k == Key::Space
                        => continue 'event,
                _ => {}
            }
        }
        if has_data {
            // Pull one packet and hand it to the matching decoder thread.
            let pktres = dmx.get_frame();
            if let Err(DemuxerError::EOF) = pktres {
                has_data = false;
                // Tell both decoder threads there is nothing more to decode.
                if has_video {
                    vsend.send(SendEvent::EOF).unwrap();
                }
                if has_audio {
                    asend.send(SendEvent::EOF).unwrap();
                }
            } else if let Err(_) = pktres {
                break;
            } else if let Ok(pkt) = pktres {
                let streamno = pkt.get_stream().get_id();

                if has_video && streamno == video_str {
                    vsend.send(SendEvent::Packet(pkt)).unwrap();
                } else if has_audio && streamno == audio_str {
                    asend.send(SendEvent::Packet(pkt)).unwrap();
                }
            }
        }

        // Drain the decoded-output channel: queue video frames for display,
        // push audio straight into the FIFO (with the device locked).
        let mut nevents = events_mtx.lock().unwrap();
        while *nevents > 0 {
            *nevents -= 1;
            let ret = erecv.recv();
            if ret.is_err() { break 'main; }
            let mut disp_evt = ret.unwrap();
            match disp_evt {
                DisplayEvent::Audio(ref mut out_buf) => {
                    unsafe { sdl::audio::ll::SDL_LockAudio(); }
                    match out_buf {
                        NABufferType::AudioPacked(buf) => add_audio(&mut amtx, buf.get_data()),
                        NABufferType::AudioU8(buf) => add_audio(&mut amtx, buf.get_data()),
                        NABufferType::AudioI16(buf) => add_audio(&mut amtx, buf.get_data()),
                        NABufferType::AudioI32(buf) => add_audio(&mut amtx, buf.get_data()),
                        NABufferType::AudioF32(buf) => add_audio(&mut amtx, buf.get_data()),
                        _ => unreachable!(),
                    };
                    unsafe { sdl::audio::ll::SDL_UnlockAudio(); }
                    // Audio is queued; let the device start playing.
                    sdl::audio::pause(false);
                },
                _ => { frame_queue.push(disp_evt); },
            };
        }
        drop(nevents);
        // Exit once both decoders finished and everything was displayed.
        if vid_finished.load(Ordering::Relaxed) &&
            aud_finished.load(Ordering::Relaxed) && frame_queue.len() == 0 {
            break;
        }

        if frame_queue.len() > 0 {
            let cur_time = systime.elapsed().unwrap();
            let disp_time = Duration::from_millis(frame_queue[0].get_time());

            //println!("cur time {:?} disp time {:?}", cur_time, disp_time);
            // Within ±10 ms of the target time: show the frame now.
            if (disp_time <= cur_time + Duration::from_millis(10)) && disp_time + Duration::from_millis(10) >= cur_time {
                let screen = scr_mtx.lock().unwrap();
                let disp_evt = frame_queue.remove(0);
                match disp_evt {
                    DisplayEvent::RGB(_, ref surf) => {
                        screen.blit(surf);
                        screen.flip();
                    },
                    DisplayEvent::YUV(_, ref ovl) => {
                        ovl.display(Some(screen.get_rect()));
                    },
                    _ => {},
                };
            } else if disp_time > cur_time {
                // Frame is early: sleep, but at most 20 ms so SDL events
                // keep being serviced.
                let diff = disp_time - cur_time;
                if diff > Duration::from_millis(20) {
                    thread::sleep(Duration::from_millis(20));
                } else {
                    thread::sleep(diff);
                }
            } else {
                // Frame is too late: drop it.
                frame_queue.remove(0);
            }
        }
    }

    // ---- shutdown --------------------------------------------------------
    if has_audio {
        // Mark the FIFO finished under the audio lock so the callback
        // starts producing silence instead of blocking.
        unsafe { sdl::audio::ll::SDL_LockAudio(); }
        let mut afifo = amtx.lock().unwrap();
        afifo.finish();
        drop(afifo);
        unsafe { sdl::audio::ll::SDL_UnlockAudio(); }
        sdl::audio::pause(true);
    }
    // Closing the packet channels unblocks the decoder threads.
    drop(vsend);
    drop(asend);
    if let Some(vthread) = video_thread {
        vthread.join().unwrap();
    }
    if let Some(athread) = audio_thread {
        athread.join().unwrap();
    }

    if has_audio {
        sdl::audio::close();
    }
}
586
587 fn main() {
588 let args: Vec<String> = env::args().collect();
589
590 if args.len() == 1 {
591 println!("usage: nihav-player input");
592 return;
593 }
594
595 play_file(args);
596
597 sdl::quit();
598 }