use sdl2::event::{Event, WindowEvent};
use sdl2::keyboard::Keycode;
+use sdl2::mouse::MouseButton;
use sdl2::render::{Canvas, Texture, TextureCreator};
use sdl2::pixels::PixelFormatEnum;
use sdl2::video::{Window, WindowContext};
use audiodec::*;
mod videodec;
use videodec::*;
+mod osd;
+use osd::*;
#[cfg(feature="debug")]
macro_rules! debug_log {
+/// Commands/events sent over the channel to a decoding thread along with packets.
pub enum PktSendEvent {
Packet(NAPacket),
+ // ask a multi-threaded decoder to hand back any frames it has ready
+ GetFrames,
Flush,
End,
ImmediateEnd,
HurryUp,
}
+/// Decoder flavour stored in `DecoderStuff`, bundling each video decoder
+/// with the frame reorderer that matches its threading model.
+pub enum DecoderType {
+ /// audio decoder — frames are consumed in decode order, no reordering
+ Audio(Box<dyn NADecoder + Send>),
+ /// single-threaded video decoder plus its frame reorderer
+ Video(Box<dyn NADecoder + Send>, Box<dyn FrameReorderer + Send>),
+ /// multi-threaded video decoder with the dedicated MT reorderer
+ VideoMT(Box<dyn NADecoderMT + Send>, MTFrameReorderer),
+}
+
+/// Decoder support data plus the decoder itself, handed to a decoding thread.
pub struct DecoderStuff {
pub dsupp: Box<NADecoderSupport>,
- pub dec: Box<dyn NADecoder + Send>,
- pub reord: Box<dyn FrameReorderer + Send>,
+ // decoder and (for video) its reorderer, unified into one enum
+ pub dec: DecoderType,
}
fn format_time(ms: u64) -> String {
}
}
+ // Returns the texture of the last queued frame so it can be redrawn
+ // (e.g. on window exposure), overlaying the OSD onto the pixel data
+ // first when the OSD is active.
- fn get_last_texture(&self) -> &Texture<'a> {
+ fn get_last_texture(&mut self, osd: &OSD) -> &Texture<'a> {
if self.pool[self.len].is_yuv {
+ if osd.is_active() {
+ // lock gives CPU access to the texture pixels for the overlay
+ self.pool[self.len].yuv_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
+ osd.draw_yuv(buffer, pitch);
+ }).unwrap();
+ }
&self.pool[self.len].yuv_tex
} else {
+ if osd.is_active() {
+ self.pool[self.len].rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
+ osd.draw_rgb(buffer, pitch);
+ }).unwrap();
+ }
&self.pool[self.len].rgb_tex
}
}
}
}
-fn try_display(disp_queue: &mut DispQueue, canvas: &mut Canvas<Window>, ctime: &TimeKeep) -> Option<u64> {
+fn try_display(disp_queue: &mut DispQueue, canvas: &mut Canvas<Window>, osd: &mut OSD, ctime: &TimeKeep) -> Option<u64> {
while !disp_queue.is_empty() {
let disp_time = disp_queue.first_ts;
let ctime = ctime.get_cur_time();
} else if disp_time + 10 < ctime {
disp_queue.move_start();
} else {
- let frm = &disp_queue.pool[disp_queue.start];
- let texture = if frm.is_yuv { &frm.yuv_tex } else { &frm.rgb_tex };
+ if osd.is_active() {
+ osd.prepare(ctime);
+ }
+ let frm = &mut disp_queue.pool[disp_queue.start];
+ let texture = if frm.is_yuv {
+ if osd.is_active() {
+ frm.yuv_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
+ osd.draw_yuv(buffer, pitch);
+ }).unwrap();
+ }
+ &frm.yuv_tex
+ } else {
+ if osd.is_active() {
+ frm.rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
+ osd.draw_rgb(buffer, pitch);
+ }).unwrap();
+ }
+ &frm.rgb_tex
+ };
canvas.clear();
canvas.copy(texture, None, None).unwrap();
canvas.present();
sdl_context: sdl2::Sdl,
vsystem: sdl2::VideoSubsystem,
asystem: sdl2::AudioSubsystem,
+ xpos: Option<i32>,
+ ypos: Option<i32>,
acontrol: AudioControl,
vcontrol: VideoControl,
video_str: u32,
audio_str: u32,
+ vthreads: usize,
+ use_mt: bool,
+
paused: bool,
mute: bool,
volume: usize,
tkeep: TimeKeep,
debug: bool,
+ osd: OSD,
#[cfg(feature="debug")]
logfile: File,
let vcontrol = VideoControl::new(None, 0, 0, 0, 0);
Self {
sdl_context, asystem, vsystem,
+ xpos: None,
+ ypos: None,
acontrol, vcontrol,
video_str: 0,
audio_str: 0,
+ vthreads: 3,
+ use_mt: true,
+
paused: false,
mute: false,
volume: 100,
tkeep: TimeKeep::new(),
debug: false,
+ osd: OSD::new(),
#[cfg(feature="debug")]
logfile: File::create("debug.log").unwrap(),
self.vcontrol.fill(disp_queue);
std::thread::sleep(Duration::from_millis(10));
}
+ self.vcontrol.wait_for_frames();
self.vcontrol.fill(disp_queue);
}
debug_log!(self; {format!(" prefilling done, frames {}-{} audio {}", disp_queue.start, disp_queue.end, self.acontrol.get_fill())});
}
+ /// Toggles the playback pause state (shared by the Space key and the
+ /// right mouse button): flips the flag, enables/disables the screen
+ /// saver, freezes/resumes the playback clock and pauses/resumes audio.
+ fn toggle_pause(&mut self) {
+ self.paused = !self.paused;
+ // single branch on the new state instead of testing self.paused twice
+ if self.paused {
+ // remember the pause timestamp and let the screen saver kick in
+ self.vsystem.enable_screen_saver();
+ self.tkeep.set_ts();
+ self.acontrol.pause();
+ } else {
+ // restart the clock from the stored timestamp
+ self.vsystem.disable_screen_saver();
+ self.tkeep.set_time();
+ self.acontrol.resume();
+ }
+ }
fn handle_events(&mut self, event_pump: &mut sdl2::EventPump, canvas: &mut Canvas<Window>, dmx: &mut Demuxer, disp_queue: &mut DispQueue) -> bool {
for event in event_pump.poll_iter() {
if let Event::Quit {..} = event {
}
if let Event::Window {win_event: WindowEvent::Exposed, ..} = event {
canvas.clear();
- canvas.copy(disp_queue.get_last_texture(), None, None).unwrap();
+ canvas.copy(disp_queue.get_last_texture(&self.osd), None, None).unwrap();
canvas.present();
}
+ if let Event::MouseButtonDown {mouse_btn: MouseButton::Right, ..} = event {
+ self.toggle_pause();
+ }
if let Event::KeyDown {keycode: Some(keycode), ..} = event {
match keycode {
Keycode::Escape | Keycode::Q => {
Keycode::Down => { self.seek(60, false, dmx, disp_queue); },
Keycode::PageUp => { self.seek(600, true, dmx, disp_queue); },
Keycode::PageDown => { self.seek(600, false, dmx, disp_queue); },
- Keycode::Space => {
- self.paused = !self.paused;
- if self.paused {
- self.vsystem.enable_screen_saver();
- self.tkeep.set_ts();
- } else {
- self.vsystem.disable_screen_saver();
- self.tkeep.set_time();
- }
- if self.paused {
- self.acontrol.pause();
- } else {
- self.acontrol.resume();
- }
- },
+ Keycode::Space => { self.toggle_pause(); },
Keycode::Plus | Keycode::KpPlus => {
self.volume = (self.volume + 10).min(MAX_VOLUME);
if !self.mute {
Keycode::H => {
self.vcontrol.try_send_video(PktSendEvent::HurryUp);
},
+ Keycode::O => {
+ self.osd.toggle();
+ },
_ => {},
};
if !self.paused {
nihav_register_all_demuxers(&mut dmx_reg);
let mut dec_reg = RegisteredDecoders::new();
nihav_register_all_decoders(&mut dec_reg);
+ let mut mtdec_reg = RegisteredMTDecoders::new();
+ if self.use_mt {
+ nihav_register_all_mt_decoders(&mut mtdec_reg);
+ }
let ret = dmx_reg.find_demuxer(dmx_name);
if ret.is_none() {
let s = dmx.get_stream(i).unwrap();
let info = s.get_info();
let decfunc = dec_reg.find_decoder(info.get_name());
+ let decfunc_mt = mtdec_reg.find_decoder(info.get_name());
println!("stream {} - {} {}", i, s, info.get_name());
debug_log!(self; {format!(" stream {} - {} {}", i, s, info.get_name())});
let str_id = s.get_id();
if info.is_video() {
if video_dec.is_none() && self.play_video {
+ if let Some(decfunc) = decfunc_mt {
+ let mut dec = (decfunc)();
+ let mut dsupp = Box::new(NADecoderSupport::new());
+ let props = info.get_properties().get_video_info().unwrap();
+ if props.get_width() != 0 {
+ width = props.get_width();
+ height = props.get_height();
+ }
+ if dec.init(&mut dsupp, info.clone(), self.vthreads).is_ok() {
+ video_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::VideoMT(dec, MTFrameReorderer::new()) });
+ self.video_str = str_id;
+ let (tbn, tbd) = s.get_timebase();
+ tb_num = tbn;
+ tb_den = tbd;
+ self.has_video = true;
+ continue;
+ } else {
+ println!("failed to create multi-threaded decoder, falling back");
+ }
+ }
if let Some(decfunc) = decfunc {
let mut dec = (decfunc)();
let mut dsupp = Box::new(NADecoderSupport::new());
dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
dec.init(&mut dsupp, info).unwrap();
- video_dec = Some(DecoderStuff{ dsupp, dec, reord });
+ video_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::Video(dec, reord) });
self.video_str = str_id;
let (tbn, tbd) = s.get_timebase();
tb_num = tbn;
let mut dsupp = Box::new(NADecoderSupport::new());
ainfo = info.get_properties().get_audio_info();
dec.init(&mut dsupp, info).unwrap();
- let reord = Box::new(NoReorderer::new());
- audio_dec = Some(DecoderStuff{ dsupp, dec, reord });
+ audio_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::Audio(dec) });
self.audio_str = str_id;
self.has_audio = true;
} else {
} else {
"NihAV player".to_owned()
};
- let window = self.vsystem.window(&wname, width as u32, height as u32)
- .position_centered().build().unwrap();
+ let mut builder = self.vsystem.window(&wname, width as u32, height as u32);
+ let window = if let (Some(xpos), Some(ypos)) = (self.xpos, self.ypos) {
+ builder.position(xpos, ypos).build().unwrap()
+ } else {
+ builder.position_centered().build().unwrap()
+ };
let mut canvas = window.into_canvas().build().unwrap();
let texture_creator = canvas.texture_creator();
let mut disp_q = DispQueue::new(&texture_creator, width, height, if self.has_video { FRAME_QUEUE_LEN } else { 0 });
if !self.has_video {
canvas.clear();
- canvas.copy(disp_q.get_last_texture(), None, None).unwrap();
+ canvas.copy(disp_q.get_last_texture(&self.osd), None, None).unwrap();
canvas.present();
}
self.acontrol.try_send_audio(PktSendEvent::End);
has_data = false;
},
- Err(err) => { println!("demuxer error {:?}", err); },
+ Err(err) => {
+ println!("demuxer error {:?}", err);
+ if err == DemuxerError::IOError {
+ self.vcontrol.try_send_video(PktSendEvent::End);
+ self.acontrol.try_send_audio(PktSendEvent::End);
+ has_data = false;
+ }
+ },
Ok(pkt) => {
let streamno = pkt.get_stream().get_id();
if self.has_video && streamno == self.video_str {
debug_log!(self; {format!(" time {}", self.tkeep.get_cur_time())});
if self.has_video {
debug_log!(self; {format!(" disp queue {}-{}, {}-{} vqueue fill {}", disp_q.first_ts, disp_q.last_ts, disp_q.start, disp_q.end, self.vcontrol.get_queue_size())});
- let ret = try_display(&mut disp_q, &mut canvas, &self.tkeep);
+ let ret = try_display(&mut disp_q, &mut canvas, &mut self.osd, &self.tkeep);
if let Some(next_time) = ret {
sleep_time = sleep_time.min(next_time);
}
thread::sleep(Duration::from_millis(20));
}
}
+ let (xpos, ypos) = canvas.into_window().position();
+ self.xpos = Some(xpos);
+ self.ypos = Some(ypos);
println!();
std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);
new_vcontrol.finish();
"-nodebug" => {
player.debug = false;
},
+ "-mt" => {
+ player.use_mt = true;
+ },
+ "-nomt" => {
+ player.use_mt = false;
+ },
+ "-threads" => {
+ if let Some(arg) = aiter.next() {
+ if let Ok(val) = arg.parse::<usize>() {
+ player.vthreads = val.max(1);
+ } else {
+ println!("wrong number of threads");
+ }
+ }
+ },
_ => {
player.play(arg, seek_time);
if player.end { break; }