+extern crate sdl2;
+extern crate nihav_core;
+extern crate nihav_registry;
+extern crate nihav_allstuff;
+
+use std::env;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+use std::time::{Duration, Instant};
+use std::thread;
+
+use sdl2::event::{Event, WindowEvent};
+use sdl2::keyboard::Keycode;
+use sdl2::render::{Canvas, Texture, TextureCreator};
+use sdl2::pixels::PixelFormatEnum;
+use sdl2::video::{Window, WindowContext};
+
+use nihav_registry::detect;
+use nihav_core::frame::*;
+use nihav_core::io::byteio::{FileReader, ByteReader};
+use nihav_core::reorder::*;
+use nihav_core::codecs::*;
+use nihav_core::demuxers::*;
+use nihav_registry::register::*;
+use nihav_allstuff::*;
+
+mod audiodec;
+use audiodec::*;
+mod videodec;
+use videodec::*;
+
// Conditional logging macro.
//
// With the "debug" feature enabled, evaluates `$blk` (a block yielding a
// `String` or `&str`) and appends it plus a newline to `$log.logfile`.
// Without the feature it expands to nothing, so the argument block is never
// evaluated and logging has zero cost.
#[cfg(feature="debug")]
macro_rules! debug_log {
    ($log: expr; $blk: block) => {
        $log.logfile.write($blk.as_bytes()).unwrap();
        $log.logfile.write(b"\n").unwrap();
    };
}
#[cfg(not(feature="debug"))]
macro_rules! debug_log {
    ($log: expr; $blk: block) => {};
}
+
/// Messages passed from the demuxing side to the audio/video handling code.
pub enum PktSendEvent {
    /// A demuxed packet to be decoded.
    Packet(NAPacket),
    /// Discard queued data (presumably after a seek — confirm in audiodec/videodec).
    Flush,
    /// No more packets will arrive; drain what is queued and finish.
    End,
    /// Stop right away without draining (see videodec/audiodec for exact handling).
    ImmediateEnd,
    /// Ask the video side to catch up with playback (sent on the 'H' key).
    HurryUp,
}
+
/// Everything needed to decode one elementary stream.
pub struct DecoderStuff {
    /// Decoder support context (holds the frame pools set up in `play()`).
    pub dsupp: Box<NADecoderSupport>,
    /// The codec decoder itself; `Send` so it can be moved to a worker thread.
    pub dec: Box<dyn NADecoder + Send>,
    /// Frame reorderer matching the codec's reordering requirements.
    pub reord: Box<dyn FrameReorderer + Send>,
}
+
/// Renders a millisecond timestamp as a human-readable duration.
///
/// Output grows with magnitude: `"s.d"`, then `"m:ss.d"`, then
/// `"h:mm:ss.d"`, where `d` is tenths of a second.
fn format_time(ms: u64) -> String {
    let tenths = (ms / 100) % 10;
    let total_secs = ms / 1000;
    let secs = total_secs % 60;
    let mins = (total_secs / 60) % 60;
    let hours = total_secs / 3600;
    match (hours, mins) {
        (0, 0) => format!("{}.{}", secs, tenths),
        (0, m) => format!("{}:{:02}.{}", m, secs, tenths),
        (h, m) => format!("{}:{:02}:{:02}.{}", h, m, secs, tenths),
    }
}
+
/// Target depth for the packet/frame queues used during prefill and playback.
const FRAME_QUEUE_LEN: usize = 25;
/// Upper bound on the user-adjustable volume (percent).
const MAX_VOLUME: usize = 200;

/// A decoded frame buffer paired with its display timestamp (milliseconds).
pub type FrameRecord = (NABufferType, u64);
+
/// Playback clock: anchors a stream timestamp (in milliseconds) to a
/// wall-clock instant, so the current position is `ref_ts + elapsed`.
pub struct TimeKeep {
    /// Wall-clock moment the anchor was taken.
    ref_time: Instant,
    /// Stream position (ms) at the anchor moment.
    ref_ts: u64,
}

impl TimeKeep {
    /// Starts the clock at position zero, anchored to "now".
    fn new() -> Self {
        Self { ref_time: Instant::now(), ref_ts: 0 }
    }
    /// Returns the current playback position in milliseconds.
    pub fn get_cur_time(&self) -> u64 {
        let elapsed_ms = self.ref_time.elapsed().as_millis() as u64;
        elapsed_ms + self.ref_ts
    }
    /// Drops the timestamp anchor back to zero (wall-clock anchor untouched).
    fn reset_ts(&mut self) {
        self.ref_ts = 0;
    }
    /// Re-anchors the clock: position `ts` corresponds to "now".
    fn reset_all(&mut self, ts: u64) {
        self.ref_ts = ts;
        self.ref_time = Instant::now();
    }
    /// Freezes the current position into the timestamp anchor (used on pause).
    fn set_ts(&mut self) {
        self.ref_ts = self.get_cur_time();
    }
    /// Restarts the wall-clock anchor at "now" (used on resume).
    fn set_time(&mut self) {
        self.ref_time = Instant::now();
    }
}
+
/// One display slot: a pair of streaming textures (RGB and planar YUV), of
/// which at most one holds the decoded frame, plus its presentation time.
pub struct DispFrame<'a> {
    /// Presentation timestamp in milliseconds.
    pub ts: u64,
    /// Whether the frame lives in `yuv_tex` (true) or `rgb_tex` (false).
    pub is_yuv: bool,
    /// Whether this slot currently holds a decoded frame.
    pub valid: bool,
    pub rgb_tex: Texture<'a>,
    pub yuv_tex: Texture<'a>,
}
+
/// Ring buffer of decoded frames waiting to be shown.
///
/// `pool` holds `len + 1` slots: indices `0..len` form the ring proper
/// (tracked by `start`/`end`), while `pool[len]` always keeps the most
/// recently displayed frame so the window can be repainted on expose events.
pub struct DispQueue<'a> {
    pub pool: Vec<DispFrame<'a>>,
    /// Timestamp of the frame at `start`, i.e. the next one to display.
    pub first_ts: u64,
    /// Timestamp of the newest queued frame (maintained by the video side —
    /// see videodec; not updated in this file).
    pub last_ts: u64,
    pub start: usize,
    pub end: usize,
    /// Ring capacity, excluding the spare "last displayed" slot.
    pub len: usize,
    pub width: usize,
    pub height: usize,
}
+
impl<'a> DispQueue<'a> {
    /// Allocates `len + 1` slots of streaming textures; the extra slot
    /// (`pool[len]`) is cleared to black so a redraw before the first frame
    /// shows a black window.
    fn new(texture_creator: &'a TextureCreator<WindowContext>, width: usize, height: usize, len: usize) -> Self {
        let mut pool = Vec::with_capacity(len);
        for _ in 0..len + 1 {
            let rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).unwrap();
            // IYUV needs even dimensions, so round width/height up to a multiple of two.
            let yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).unwrap();
            pool.push(DispFrame{ ts: 0, is_yuv: false, valid: false, rgb_tex, yuv_tex });
        }
        // Fill the spare slot's RGB texture with black.
        pool[len].is_yuv = false;
        pool[len].rgb_tex.with_lock(None, |buffer: &mut [u8], _pitch: usize| {
            for el in buffer.iter_mut() { *el = 0; }
        }).unwrap();

        Self { pool, first_ts: 0, last_ts: 0, start: 0, end: 0, len, width, height }
    }

    /// Empties the ring and invalidates every slot (used after a seek).
    fn flush(&mut self) {
        self.start = 0;
        self.end = 0;
        self.first_ts = 0;
        self.last_ts = 0;
        for frm in self.pool.iter_mut() {
            frm.valid = false;
        }
    }

    /// Returns the texture of the most recently displayed frame
    /// (kept in the spare slot) for window-expose repaints.
    fn get_last_texture(&self) -> &Texture<'a> {
        if self.pool[self.len].is_yuv {
            &self.pool[self.len].yuv_tex
        } else {
            &self.pool[self.len].rgb_tex
        }
    }
    pub fn is_empty(&self) -> bool { self.start == self.end }
    // A zero-length queue (no video) is permanently "full".
    pub fn is_full(&self) -> bool { self.len == 0 || self.start == (self.end + 1) % self.len }
    /// Advances the write position (ring wrap-around).
    pub fn move_end(&mut self) {
        self.end += 1;
        if self.end >= self.len {
            self.end -= self.len;
        }
    }
    /// Consumes the head frame: swaps it into the spare "last displayed"
    /// slot, advances the read position and refreshes `first_ts`.
    pub fn move_start(&mut self) {
        self.pool.swap(self.start, self.len);
        self.start += 1;
        if self.start >= self.len {
            self.start -= self.len;
        }
        if !self.is_empty() {
            self.first_ts = self.pool[self.start].ts;
        }
    }
}
+
+fn try_display(disp_queue: &mut DispQueue, canvas: &mut Canvas<Window>, ctime: &TimeKeep) -> Option<u64> {
+ while !disp_queue.is_empty() {
+ let disp_time = disp_queue.first_ts;
+ let ctime = ctime.get_cur_time();
+ if disp_time > ctime + 10 {
+ return Some(disp_time - ctime);
+ } else if disp_time + 10 < ctime {
+ disp_queue.move_start();
+ } else {
+ let frm = &disp_queue.pool[disp_queue.start];
+ let texture = if frm.is_yuv { &frm.yuv_tex } else { &frm.rgb_tex };
+ canvas.clear();
+ canvas.copy(texture, None, None).unwrap();
+ canvas.present();
+
+ disp_queue.move_start();
+ if !disp_queue.is_empty() {
+ return Some((disp_queue.first_ts - ctime).saturating_sub(2));
+ } else {
+ return None;
+ }
+ }
+ }
+ None
+}
+
/// Global player state: SDL subsystems, per-file A/V controllers, and
/// user-toggled settings that persist across the files on the command line.
struct Player {
    sdl_context: sdl2::Sdl,
    vsystem: sdl2::VideoSubsystem,
    asystem: sdl2::AudioSubsystem,

    // Per-file output controllers; replaced via mem::swap in play().
    acontrol: AudioControl,
    vcontrol: VideoControl,

    // User preference: whether to decode these stream kinds at all.
    play_video: bool,
    play_audio: bool,
    // Whether the current file has a playable stream of each kind.
    has_video: bool,
    has_audio: bool,
    // Demuxer stream IDs of the selected video/audio streams.
    video_str: u32,
    audio_str: u32,

    paused: bool,
    mute: bool,
    // Volume in percent (0..=MAX_VOLUME); remembered while muted.
    volume: usize,
    // Set when the user quits; stops processing the remaining files.
    end: bool,

    // Playback clock.
    tkeep: TimeKeep,

    // Show the extended console status line.
    debug: bool,

    // Log sink for the debug_log! macro (debug builds only).
    #[cfg(feature="debug")]
    logfile: File,
}
+
impl Player {
    /// Initialises SDL (video + audio) and creates an idle player with
    /// defaults: both stream kinds enabled, volume 100%, not paused.
    fn new() -> Self {
        let sdl_context = sdl2::init().unwrap();
        let vsystem = sdl_context.video().unwrap();
        let asystem = sdl_context.audio().unwrap();
        // Keep the screen saver off while playing; re-enabled on pause.
        vsystem.disable_screen_saver();
        // Placeholder controllers; real ones are built per file in play().
        let acontrol = AudioControl::new(None, None, &asystem);
        let vcontrol = VideoControl::new(None, 0, 0, 0, 0);
        Self {
            sdl_context, asystem, vsystem,

            acontrol, vcontrol,

            play_video: true,
            play_audio: true,
            has_video: false,
            has_audio: false,
            video_str: 0,
            audio_str: 0,

            paused: false,
            mute: false,
            volume: 100,
            end: false,

            tkeep: TimeKeep::new(),

            debug: false,

            #[cfg(feature="debug")]
            logfile: File::create("debug.log").unwrap(),
        }
    }
    /// Seeks `off` seconds forward (`fwd`) or backward from the current
    /// position, flushes all queues and refills them from the new position,
    /// then re-anchors the playback clock.
    fn seek(&mut self, off: u64, fwd: bool, dmx: &mut Demuxer, disp_queue: &mut DispQueue) {
        let cur_time = self.tkeep.get_cur_time();
        let seektime = if fwd { cur_time + off * 1000 } else {
            cur_time.saturating_sub(off * 1000) };
        debug_log!(self; {format!(" seek to {}", seektime)});

        let ret = dmx.seek(NATimePoint::Milliseconds(seektime));
        if ret.is_err() {
            // Keep playing from the old position on seek failure.
            println!(" seek error");
            return;
        }

        // Drop everything queued before the seek point.
        self.acontrol.flush();
        self.vcontrol.flush();
        disp_queue.flush();

        self.tkeep.reset_ts();
        self.prefill(dmx, disp_queue);
        // Re-anchor the clock: prefer the first queued video frame's
        // timestamp, otherwise wait for the audio side to report a time.
        if !disp_queue.is_empty() {
            self.tkeep.reset_all(disp_queue.first_ts);
        } else {
            let mut iterations = 0;
            let mut time = self.acontrol.get_time();
            while time.is_none() {
                iterations += 1;
                std::thread::yield_now();
                // Bounded spin so a stuck audio thread cannot hang the UI.
                if iterations > 1000000 { println!(" still no time set?!"); break; }
                time = self.acontrol.get_time();
            }
            if let Some(time) = time {
                self.tkeep.reset_all(time);
            }
        }
        if !self.paused {
            self.acontrol.resume();
        }
    }
    /// Pumps packets from the demuxer into the audio/video queues until they
    /// (and the display queue) are reasonably full, before playback starts
    /// or resumes after a seek.
    fn prefill(&mut self, dmx: &mut Demuxer, disp_queue: &mut DispQueue) {
        debug_log!(self; {" prefilling"});
        while self.vcontrol.get_queue_size() < FRAME_QUEUE_LEN {
            // Demux another packet only while both consumers have room.
            let mut try_send = self.acontrol.get_queue_size() < FRAME_QUEUE_LEN && (!self.has_video || (!self.vcontrol.is_filled(FRAME_QUEUE_LEN) && !disp_queue.is_full()));

            // Back off when a half-full queue cannot accept queued data.
            if !self.vcontrol.try_send_queued() && self.vcontrol.get_queue_size() > FRAME_QUEUE_LEN / 2 {
                try_send = false;
            }
            if !self.acontrol.try_send_queued() && self.acontrol.get_queue_size() > FRAME_QUEUE_LEN / 2 {
                try_send = false;
            }
            if try_send {
                match dmx.get_frame() {
                    Err(DemuxerError::EOF) => break,
                    Err(_) => break,
                    Ok(pkt) => {
                        let streamno = pkt.get_stream().get_id();
                        // Packets from unselected streams are dropped.
                        if self.has_video && streamno == self.video_str {
                            self.vcontrol.try_send_video(PktSendEvent::Packet(pkt));
                        } else if self.has_audio && streamno == self.audio_str {
                            self.acontrol.try_send_audio(PktSendEvent::Packet(pkt));
                        }
                    }
                };
            }
            self.vcontrol.fill(disp_queue);

            if !try_send {
                break;
            }
        }
        if self.has_video {
            // Give the video decoder time to populate the display queue.
            while self.vcontrol.get_queue_size() > 0 && !disp_queue.is_full() {
                self.vcontrol.try_send_queued();
                self.vcontrol.fill(disp_queue);
                std::thread::sleep(Duration::from_millis(10));
            }
            self.vcontrol.fill(disp_queue);
        }
        debug_log!(self; {format!(" prefilling done, frames {}-{} audio {}", disp_queue.start, disp_queue.end, self.acontrol.get_fill())});
    }
    /// Handles pending SDL events (quit, window expose, keyboard control).
    /// Returns true when playback of the current file should stop.
    fn handle_events(&mut self, event_pump: &mut sdl2::EventPump, canvas: &mut Canvas<Window>, dmx: &mut Demuxer, disp_queue: &mut DispQueue) -> bool {
        for event in event_pump.poll_iter() {
            if let Event::Quit {..} = event {
                // Window closed: stop this file and the whole playlist.
                self.end = true;
                println!();
                return true;
            }
            if let Event::Window {win_event: WindowEvent::Exposed, ..} = event {
                // Repaint from the last displayed frame kept in the spare slot.
                canvas.clear();
                canvas.copy(disp_queue.get_last_texture(), None, None).unwrap();
                canvas.present();
            }
            if let Event::KeyDown {keycode: Some(keycode), ..} = event {
                match keycode {
                    Keycode::Escape | Keycode::Q => {
                        // Quit entirely.
                        self.end = true;
                        println!();
                        return true;
                    },
                    // Return skips to the next file without setting `end`.
                    Keycode::Return => return true,
                    Keycode::Right => { self.seek(10, true, dmx, disp_queue); },
                    Keycode::Left => { self.seek(10, false, dmx, disp_queue); },
                    Keycode::Up => { self.seek(60, true, dmx, disp_queue); },
                    Keycode::Down => { self.seek(60, false, dmx, disp_queue); },
                    Keycode::PageUp => { self.seek(600, true, dmx, disp_queue); },
                    Keycode::PageDown => { self.seek(600, false, dmx, disp_queue); },
                    Keycode::Space => {
                        // Toggle pause: freeze/unfreeze the clock, allow the
                        // screen saver while paused, pause/resume audio.
                        self.paused = !self.paused;
                        if self.paused {
                            self.vsystem.enable_screen_saver();
                            self.tkeep.set_ts();
                        } else {
                            self.vsystem.disable_screen_saver();
                            self.tkeep.set_time();
                        }
                        if self.paused {
                            self.acontrol.pause();
                        } else {
                            self.acontrol.resume();
                        }
                    },
                    Keycode::Plus | Keycode::KpPlus => {
                        self.volume = (self.volume + 10).min(MAX_VOLUME);
                        // While muted only remember the new volume.
                        if !self.mute {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::Minus | Keycode::KpMinus => {
                        self.volume = self.volume.saturating_sub(10);
                        if !self.mute {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::D => {
                        // Toggle the extended console status line.
                        self.debug = !self.debug;
                    },
                    Keycode::M => {
                        self.mute = !self.mute;
                        if self.mute {
                            self.acontrol.set_volume(0);
                        } else {
                            self.acontrol.set_volume(self.volume);
                        }
                    },
                    Keycode::H => {
                        // Ask the video side to catch up.
                        self.vcontrol.try_send_video(PktSendEvent::HurryUp);
                    },
                    _ => {},
                };
                if !self.paused {
                    // Wipe the status line so the next update starts clean.
                    print!("{:60}\r", ' ');
                    std::io::stdout().flush().unwrap();
                }
            }
        }
        false
    }
    /// Plays a single file: detects the container, sets up demuxer and
    /// decoders, opens a window and runs the playback loop until the file
    /// ends or the user stops it.  `start_time` is an optional initial seek.
    fn play(&mut self, name: &str, start_time: NATimePoint) {
        debug_log!(self; {format!("Playing {}", name)});

        // prepare data source
        let path = Path::new(name);
        let mut file = File::open(path).unwrap();
        let dmx_fact;
        let mut fr = FileReader::new_read(&mut file);
        let mut br = ByteReader::new(&mut fr);
        let res = detect::detect_format(name, &mut br);
        if res.is_none() {
            println!("cannot detect format for {}", name);
            return;
        }
        let (dmx_name, _score) = res.unwrap();
        debug_log!(self; {format!(" found demuxer {} with score {:?}", dmx_name, _score)});
        println!("trying demuxer {} on {}", dmx_name, name);

        let mut dmx_reg = RegisteredDemuxers::new();
        nihav_register_all_demuxers(&mut dmx_reg);
        let mut dec_reg = RegisteredDecoders::new();
        nihav_register_all_decoders(&mut dec_reg);

        let ret = dmx_reg.find_demuxer(dmx_name);
        if ret.is_none() {
            println!("error finding {} demuxer", dmx_name);
            return;
        }
        dmx_fact = ret.unwrap();
        // Format probing consumed data — rewind before creating the demuxer.
        br.seek(SeekFrom::Start(0)).unwrap();
        let ret = create_demuxer(dmx_fact, &mut br);
        if ret.is_err() {
            println!("error creating demuxer");
            return;
        }
        let mut dmx = ret.unwrap();
        if start_time != NATimePoint::None {
            debug_log!(self; {format!(" start seek to {}", start_time)});
            // A failed initial seek is not fatal — play from the beginning.
            if dmx.seek(start_time).is_err() {
                println!("initial seek failed");
            }
        }

        // Fallback window size when the stream does not report dimensions.
        let mut width = 640;
        let mut height = 480;
        let mut tb_num = 0;
        let mut tb_den = 0;
        let mut ainfo: Option<NAAudioInfo> = None;

        let mut video_dec: Option<DecoderStuff> = None;
        let mut audio_dec: Option<DecoderStuff> = None;

        let duration = dmx.get_duration();
        if duration != 0 {
            println!(" total duration {}", format_time(duration));
        }
        // Pick the first decodable video and audio stream.
        self.has_video = false;
        self.has_audio = false;
        for i in 0..dmx.get_num_streams() {
            let s = dmx.get_stream(i).unwrap();
            let info = s.get_info();
            let decfunc = dec_reg.find_decoder(info.get_name());
            println!("stream {} - {} {}", i, s, info.get_name());
            debug_log!(self; {format!(" stream {} - {} {}", i, s, info.get_name())});
            let str_id = s.get_id();
            if info.is_video() {
                if video_dec.is_none() && self.play_video {
                    if let Some(decfunc) = decfunc {
                        let mut dec = (decfunc)();
                        let mut dsupp = Box::new(NADecoderSupport::new());
                        let props = info.get_properties().get_video_info().unwrap();
                        if props.get_width() != 0 {
                            width = props.get_width();
                            height = props.get_height();
                        }
                        // Codecs with complex reordering need a deeper
                        // reorder buffer; plain I/P/B needs only depth 3.
                        let desc = get_codec_description(info.get_name());
                        let (reorder_depth, reord) = if desc.is_none() || (desc.unwrap().caps & CODEC_CAP_COMPLEX_REORDER) == 0 {
                            let reord: Box<dyn FrameReorderer + Send> = Box::new(IPBReorderer::new());
                            (3, reord)
                        } else {
                            let reord: Box<dyn FrameReorderer + Send> = Box::new(ComplexReorderer::new());
                            (16, reord)
                        };
                        dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth);
                        dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
                        dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
                        dec.init(&mut dsupp, info).unwrap();
                        video_dec = Some(DecoderStuff{ dsupp, dec, reord });
                        self.video_str = str_id;
                        let (tbn, tbd) = s.get_timebase();
                        tb_num = tbn;
                        tb_den = tbd;
                        self.has_video = true;
                    } else {
                        println!("no video decoder for {} found!", info.get_name());
                    }
                }
            } else if info.is_audio() {
                if audio_dec.is_none() && self.play_audio {
                    if let Some(decfunc) = decfunc {
                        let mut dec = (decfunc)();
                        let mut dsupp = Box::new(NADecoderSupport::new());
                        ainfo = info.get_properties().get_audio_info();
                        dec.init(&mut dsupp, info).unwrap();
                        // Audio frames come in decode order already.
                        let reord = Box::new(NoReorderer::new());
                        audio_dec = Some(DecoderStuff{ dsupp, dec, reord });
                        self.audio_str = str_id;
                        self.has_audio = true;
                    } else {
                        println!("no audio decoder for {} found!", info.get_name());
                    }
                }
            } else {
                println!("decoder {} not found", info.get_name());
            }
        }
        if !self.has_video && !self.has_audio {
            println!("No playable streams found.");
            return;
        }

        // Upscale tiny videos by powers of two for a usable window size.
        while (width <= 384) && (height <= 288) {
            width <<= 1;
            height <<= 1;
        }

        // prepare playback structure
        let mut new_vcontrol = VideoControl::new(video_dec, width, height, tb_num, tb_den);
        std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);

        let mut new_acontrol = AudioControl::new(audio_dec, ainfo, &self.asystem);
        std::mem::swap(&mut self.acontrol, &mut new_acontrol);

        if self.mute {
            self.acontrol.set_volume(0);
        } else {
            self.acontrol.set_volume(self.volume);
        }

        // Window title: append the file name when available.
        let fname = path.file_name();
        let wname = if let Some(fname) = fname {
            "NihAV player - ".to_owned() + fname.to_str().unwrap()
        } else {
            "NihAV player".to_owned()
        };
        let window = self.vsystem.window(&wname, width as u32, height as u32)
            .position_centered().build().unwrap();
        let mut canvas = window.into_canvas().build().unwrap();
        let texture_creator = canvas.texture_creator();
        // Audio-only files get a zero-length queue (only the black spare slot).
        let mut disp_q = DispQueue::new(&texture_creator, width, height, if self.has_video { FRAME_QUEUE_LEN } else { 0 });
        if !self.has_video {
            canvas.clear();
            canvas.copy(disp_q.get_last_texture(), None, None).unwrap();
            canvas.present();
        }

        self.has_audio = self.acontrol.has_audio();
        if !self.has_video && !self.has_audio {
            println!("No playable streams.");
            return;
        }

        // play
        self.prefill(&mut dmx, &mut disp_q);
        self.tkeep.reset_all(0);
        if !self.paused {
            self.acontrol.resume();
        }
        let mut event_pump = self.sdl_context.event_pump().unwrap();
        let mut last_disp = Instant::now();
        let mut has_data = true;
        'main: loop {
            if self.handle_events(&mut event_pump, &mut canvas, &mut dmx, &mut disp_q) {
                println!();
                break 'main;
            }
            if !self.paused {
                // Demux as long as the queues can take more packets.
                let mut try_send = self.acontrol.get_queue_size() < FRAME_QUEUE_LEN && self.vcontrol.get_queue_size() < FRAME_QUEUE_LEN;
                if !self.vcontrol.try_send_queued() && self.vcontrol.is_filled(FRAME_QUEUE_LEN) {
                    try_send = false;
                }
                if !self.acontrol.try_send_queued() {
                    try_send = false;
                }
                while has_data && try_send {
                    match dmx.get_frame() {
                        Err(DemuxerError::EOF) => {
                            // Tell both sides to drain and finish.
                            self.vcontrol.try_send_video(PktSendEvent::End);
                            self.acontrol.try_send_audio(PktSendEvent::End);
                            has_data = false;
                        },
                        Err(err) => { println!("demuxer error {:?}", err); },
                        Ok(pkt) => {
                            let streamno = pkt.get_stream().get_id();
                            if self.has_video && streamno == self.video_str {
                                debug_log!(self; {" sending video packet"});
                                self.vcontrol.try_send_video(PktSendEvent::Packet(pkt));
                                if self.vcontrol.is_filled(FRAME_QUEUE_LEN) {
                                    try_send = false;
                                }
                            } else if self.has_audio && streamno == self.audio_str {
                                debug_log!(self; {" sending audio packet"});
                                self.acontrol.try_send_audio(PktSendEvent::Packet(pkt));
                                if self.acontrol.get_queue_size() >= FRAME_QUEUE_LEN {
                                    try_send = false;
                                }
                            }
                        }
                    };
                }
                self.vcontrol.fill(&mut disp_q);
                // Default tick; shortened below by video/audio deadlines.
                let mut sleep_time = 25;
                debug_log!(self; {format!(" time {}", self.tkeep.get_cur_time())});
                if self.has_video {
                    debug_log!(self; {format!(" disp queue {}-{}, {}-{} vqueue fill {}", disp_q.first_ts, disp_q.last_ts, disp_q.start, disp_q.end, self.vcontrol.get_queue_size())});
                    let ret = try_display(&mut disp_q, &mut canvas, &self.tkeep);
                    if let Some(next_time) = ret {
                        sleep_time = sleep_time.min(next_time);
                    }
                }
                if self.has_audio {
                    let time_left = self.acontrol.get_time_left();
                    debug_log!(self; {format!(" audio left {}", time_left)});
                    sleep_time = sleep_time.min(time_left);
                }
                debug_log!(self; {format!(" sleep {}ms", sleep_time)});
                // Throttled console status line.
                if last_disp.elapsed().as_millis() >= 10 {
                    let c_time = self.tkeep.get_cur_time();

                    if !self.debug {
                        print!(" {} {}% \r", format_time(c_time), self.acontrol.get_volume());
                    } else {
                        print!(" {} {} {}% {:3} {:6}\r", format_time(c_time), if self.vcontrol.is_yuv() { 'Y' } else { 'R' }, self.acontrol.get_volume(), (disp_q.end + disp_q.len - disp_q.start) % disp_q.len, self.acontrol.get_fill());
                    }
                    std::io::stdout().flush().unwrap();
                    last_disp = Instant::now();
                }
                // Stop once every active stream has finished.
                let mut end = true;
                if self.has_video && !self.vcontrol.is_video_end() {
                    end = false;
                }
                if self.has_audio && !self.acontrol.is_audio_end() {
                    end = false;
                }
                if end {
                    break;
                }
                thread::sleep(Duration::from_millis(sleep_time));
            } else {
                // Paused: just poll events at ~50 Hz.
                thread::sleep(Duration::from_millis(20));
            }
        }
        println!();
        // Restore the placeholder controllers and shut down the real ones.
        std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);
        new_vcontrol.finish();
        std::mem::swap(&mut self.acontrol, &mut new_acontrol);
        new_acontrol.finish();
    }
}
+
+fn main() {
+ let args: Vec<String> = env::args().collect();
+
+ if args.len() == 1 {
+ println!("usage: nihav-player file1 file2 ...");
+ return;
+ }
+
+ let mut player = Player::new();
+
+ let mut aiter = args.iter().skip(1);
+ let mut seek_time = NATimePoint::None;
+ while let Some(arg) = aiter.next() {
+ match arg.as_str() {
+ "-an" => { player.play_audio = false; },
+ "-ae" => { player.play_audio = true; },
+ "-vn" => { player.play_video = false; },
+ "-ve" => { player.play_video = true; },
+ "-seek" => {
+ if let Some(arg) = aiter.next() {
+ if let Ok(time) = arg.parse::<NATimePoint>() {
+ seek_time = time;
+ } else {
+ println!("wrong seek time");
+ seek_time = NATimePoint::None;
+ }
+ }
+ },
+ "-vol" => {
+ if let Some(arg) = aiter.next() {
+ if let Ok(vol) = arg.parse::<usize>() {
+ player.volume = vol.min(MAX_VOLUME);
+ } else {
+ println!("wrong volume");
+ }
+ }
+ },
+ "-debug" => {
+ player.debug = true;
+ },
+ "-nodebug" => {
+ player.debug = false;
+ },
+ _ => {
+ player.play(arg, seek_time);
+ if player.end { break; }
+ seek_time = NATimePoint::None;
+ },
+ };
+ }
+}