use std::sync::atomic::{AtomicU8, Ordering};
use sdl2::event::{Event, WindowEvent};
-use sdl2::keyboard::Keycode;
-use sdl2::mouse::MouseButton;
+use sdl2::keyboard::{Keycode, Mod};
+use sdl2::mouse::{MouseButton, MouseWheelDirection};
use sdl2::render::{Canvas, Texture, TextureCreator};
use sdl2::pixels::PixelFormatEnum;
use sdl2::video::{Window, WindowContext};
use nihav_registry::register::*;
use nihav_allstuff::*;
+#[cfg(feature="hwaccel")]
+use hwdec_vaapi::*;
+
mod audiodec;
use audiodec::*;
mod videodec;
use osd::*;
#[repr(u8)]
-#[derive(Clone,Copy,Debug,PartialEq)]
+#[derive(Clone,Copy,Debug,PartialEq,Default)]
enum DecodingState {
+ #[default]
Normal,
Waiting,
Flush,
End,
}
-impl Default for DecodingState {
- fn default() -> Self { DecodingState::Normal }
-}
-
impl From<u8> for DecodingState {
fn from(val: u8) -> Self {
match val {
($log: expr; $blk: block) => {};
}
+/// Output window scaling mode requested via the `-scale` option.
+enum ScaleSize {
+    /// Double the picture size while it fits into 384x288 (default).
+    Auto,
+    /// Scale both dimensions by a positive factor, e.g. "1.5x".
+    Times(f32),
+    /// Use the exact dimensions given as "<width>x<height>".
+    Fixed(usize, usize)
+}
+
+impl FromStr for ScaleSize {
+    type Err = ();
+    /// Accepts ""/"auto", "<factor>x" (or "X"), and "<width>x<height>".
+    /// Returns `Err(())` for zero or negative factors, zero dimensions,
+    /// unparsable numbers, or extra separators (e.g. "1x2x3").
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if matches!(s, "" | "auto") {
+            Ok(ScaleSize::Auto)
+        } else if s.ends_with('x') || s.ends_with('X') {
+            // "<factor>x" — 'x' is ASCII, so the byte slice below is safe
+            let factor = s[..s.len() - 1].parse::<f32>().map_err(|_| ())?;
+            if factor > 0.0 {
+                Ok(ScaleSize::Times(factor))
+            } else {
+                Err(())
+            }
+        } else if s.contains('x') || s.contains('X') {
+            let sep = if s.contains('x') { 'x' } else { 'X' };
+            let mut dims = s.split(sep);
+            let w = dims.next().unwrap();
+            let h = dims.next().unwrap();
+            // reject inputs with more than one separator like "1x2x3",
+            // which would otherwise be silently truncated to "1x2"
+            if dims.next().is_some() {
+                return Err(());
+            }
+            let width = w.parse::<usize>().map_err(|_| ())?;
+            let height = h.parse::<usize>().map_err(|_| ())?;
+            if width > 0 && height > 0 {
+                Ok(ScaleSize::Fixed(width, height))
+            } else {
+                Err(())
+            }
+        } else {
+            Err(())
+        }
+    }
+}
+
pub enum PktSendEvent {
Packet(NAPacket),
GetFrames,
Audio(Box<dyn NADecoder + Send>),
Video(Box<dyn NADecoder + Send>, Box<dyn FrameReorderer + Send>),
VideoMT(Box<dyn NADecoderMT + Send>, MTFrameReorderer),
+ #[cfg(feature="hwaccel")]
+ VideoHW(Box<dyn HWDecoder + Send>),
}
pub struct DecoderStuff {
fn new(texture_creator: &'a TextureCreator<WindowContext>, width: usize, height: usize, len: usize) -> Self {
let mut pool = Vec::with_capacity(len);
for _ in 0..len + 1 {
- let rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).unwrap();
- let yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).unwrap();
+ let rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).expect("failed to create RGB texture");
+ let yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).expect("failed to create YUV texture");
pool.push(DispFrame{ ts: 0, is_yuv: false, valid: false, rgb_tex, yuv_tex });
}
pool[len].is_yuv = false;
pool[len].rgb_tex.with_lock(None, |buffer: &mut [u8], _pitch: usize| {
for el in buffer.iter_mut() { *el = 0; }
- }).unwrap();
+ }).expect("RGB texture could not be locked");
Self { pool, first_ts: 0, last_ts: 0, start: 0, end: 0, len, width, height }
}
if osd.is_active() {
self.pool[self.len].yuv_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
osd.draw_yuv(buffer, pitch);
- }).unwrap();
+ }).expect("YUV texture locking failure");
}
&self.pool[self.len].yuv_tex
} else {
if osd.is_active() {
self.pool[self.len].rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
osd.draw_rgb(buffer, pitch);
- }).unwrap();
+ }).expect("RGB texture locking failure");
}
&self.pool[self.len].rgb_tex
}
if osd.is_active() {
frm.yuv_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
osd.draw_yuv(buffer, pitch);
- }).unwrap();
+ }).expect("YUV texture locking failure");
}
&frm.yuv_tex
} else {
if osd.is_active() {
frm.rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
osd.draw_rgb(buffer, pitch);
- }).unwrap();
+ }).expect("RGB texture locking failure");
}
&frm.rgb_tex
};
canvas.clear();
- canvas.copy(texture, None, None).unwrap();
+ canvas.copy(texture, None, None).expect("canvas blit failure");
canvas.present();
disp_queue.move_start();
sdl_context: sdl2::Sdl,
vsystem: sdl2::VideoSubsystem,
asystem: sdl2::AudioSubsystem,
- xpos: Option<i32>,
- ypos: Option<i32>,
acontrol: AudioControl,
vcontrol: VideoControl,
has_audio: bool,
video_str: u32,
audio_str: u32,
+ sc_size: ScaleSize,
vthreads: usize,
use_mt: bool,
+ #[cfg(feature="hwaccel")]
+ use_hwaccel: bool,
paused: bool,
mute: bool,
impl Player {
fn new() -> Self {
- let sdl_context = sdl2::init().unwrap();
- let vsystem = sdl_context.video().unwrap();
- let asystem = sdl_context.audio().unwrap();
+ let sdl_context = sdl2::init().expect("SDL2 init failure");
+ let vsystem = sdl_context.video().expect("video subsystem init failure");
+ let asystem = sdl_context.audio().expect("audio subsystem init failure");
vsystem.disable_screen_saver();
- let acontrol = AudioControl::new(None, None, &asystem);
+ let acontrol = AudioControl::new(None, None, false, &asystem);
let vcontrol = VideoControl::new(None, 0, 0, 0, 0);
Self {
sdl_context, asystem, vsystem,
- xpos: None,
- ypos: None,
acontrol, vcontrol,
has_audio: false,
video_str: 0,
audio_str: 0,
+ sc_size: ScaleSize::Auto,
vthreads: 3,
use_mt: true,
+ #[cfg(feature="hwaccel")]
+ use_hwaccel: true,
paused: false,
mute: false,
osd: OSD::new(),
#[cfg(feature="debug")]
- logfile: File::create("debug.log").unwrap(),
+ logfile: File::create("debug.log").expect("'debug.log' should be available for writing"),
}
}
fn seek(&mut self, off: u64, fwd: bool, dmx: &mut Demuxer, disp_queue: &mut DispQueue) -> Result<(), ()> {
}
if let Event::Window {win_event: WindowEvent::Exposed, ..} = event {
canvas.clear();
- canvas.copy(disp_queue.get_last_texture(&self.osd), None, None).unwrap();
+ canvas.copy(disp_queue.get_last_texture(&self.osd), None, None).expect("blitting failure");
canvas.present();
}
- if let Event::MouseButtonDown {mouse_btn: MouseButton::Right, ..} = event {
- self.toggle_pause();
+ if let Event::MouseButtonDown {mouse_btn, ..} = event {
+ match mouse_btn {
+ MouseButton::Right => self.toggle_pause(),
+ MouseButton::Middle => self.osd.toggle(),
+ _ => {},
+ };
}
- if let Event::KeyDown {keycode: Some(keycode), ..} = event {
+ if let Event::MouseWheel {direction: MouseWheelDirection::Normal, x: 0, y, ..} = event {
+ self.seek(10, y > 0, dmx, disp_queue)?;
+ }
+ if let Event::KeyDown {keycode: Some(keycode), keymod, ..} = event {
match keycode {
- Keycode::Escape | Keycode::Q => {
+ Keycode::Escape => {
+ self.end = true;
+ println!();
+ return Ok(true);
+ },
+ Keycode::Q if keymod.contains(Mod::RSHIFTMOD) || keymod.contains(Mod::LSHIFTMOD) => {
self.end = true;
println!();
return Ok(true);
},
- Keycode::Return => return Ok(true),
- Keycode::Right => { self.seek(10, true, dmx, disp_queue)?; },
- Keycode::Left => { self.seek(10, false, dmx, disp_queue)?; },
- Keycode::Up => { self.seek(60, true, dmx, disp_queue)?; },
- Keycode::Down => { self.seek(60, false, dmx, disp_queue)?; },
- Keycode::PageUp => { self.seek(600, true, dmx, disp_queue)?; },
- Keycode::PageDown => { self.seek(600, false, dmx, disp_queue)?; },
+ Keycode::Return | Keycode::KpEnter => return Ok(true),
+ Keycode::R => { self.seek(0, true, dmx, disp_queue)?; },
+ Keycode::Right | Keycode::Kp6 => { self.seek(10, true, dmx, disp_queue)?; },
+ Keycode::Left | Keycode::Kp4 => { self.seek(10, false, dmx, disp_queue)?; },
+ Keycode::Up | Keycode::Kp8 => { self.seek(60, true, dmx, disp_queue)?; },
+ Keycode::Down | Keycode::Kp2 => { self.seek(60, false, dmx, disp_queue)?; },
+ Keycode::PageUp | Keycode::Kp9 => { self.seek(600, true, dmx, disp_queue)?; },
+ Keycode::PageDown | Keycode::Kp3 => { self.seek(600, false, dmx, disp_queue)?; },
Keycode::Space => { self.toggle_pause(); },
Keycode::Plus | Keycode::KpPlus => {
self.volume = (self.volume + 10).min(MAX_VOLUME);
self.vcontrol.try_send_video(PktSendEvent::HurryUp);
},
Keycode::O => {
- self.osd.toggle();
+ if keymod.contains(Mod::RSHIFTMOD) || keymod.contains(Mod::LSHIFTMOD) {
+ self.osd.toggle_perm();
+ } else {
+ self.osd.toggle();
+ }
},
_ => {},
};
}
Ok(false)
}
- fn play(&mut self, name: &str, start_time: NATimePoint) {
+ fn play(&mut self, mut window: Window, name: &str, start_time: NATimePoint) -> Window {
debug_log!(self; {format!("Playing {}", name)});
// prepare data source
let path = Path::new(name);
- let mut file = File::open(path).unwrap();
- let dmx_fact;
+    // Open the input; return the (reusable) window instead of aborting the
+    // whole playlist when the path cannot be opened or is a directory.
+    let mut file = if let Ok(handle) = File::open(path) {
+        if let Ok(meta) = handle.metadata() {
+            if meta.is_dir() {
+                // directories (e.g. from shell globbing) are skipped silently
+                return window;
+            }
+        }
+        handle
+    } else {
+        println!("failed to open {}", name);
+        return window;
+    };
let mut fr = FileReader::new_read(&mut file);
let mut br = ByteReader::new(&mut fr);
let res = detect::detect_format(name, &mut br);
if res.is_none() {
println!("cannot detect format for {}", name);
- return;
+ return window;
}
let (dmx_name, _score) = res.unwrap();
debug_log!(self; {format!(" found demuxer {} with score {:?}", dmx_name, _score)});
let ret = dmx_reg.find_demuxer(dmx_name);
if ret.is_none() {
println!("error finding {} demuxer", dmx_name);
- return;
+ return window;
}
- dmx_fact = ret.unwrap();
- br.seek(SeekFrom::Start(0)).unwrap();
+ let dmx_fact = ret.unwrap();
+ br.seek(SeekFrom::Start(0)).expect("should be able to seek to the start");
let ret = create_demuxer(dmx_fact, &mut br);
if ret.is_err() {
println!("error creating demuxer");
- return;
+ return window;
}
let mut dmx = ret.unwrap();
if start_time != NATimePoint::None {
let mut tb_num = 0;
let mut tb_den = 0;
let mut ainfo: Option<NAAudioInfo> = None;
+ let mut sbr_hack = false;
let mut video_dec: Option<DecoderStuff> = None;
let mut audio_dec: Option<DecoderStuff> = None;
let str_id = s.get_id();
if info.is_video() {
if video_dec.is_none() && self.play_video {
+                    // Try hardware-accelerated decoding first for H.264 when
+                    // enabled; on init failure fall through to the software
+                    // decoder selection below.
+                    #[cfg(feature="hwaccel")]
+                    if info.get_name() == "h264" && self.use_hwaccel {
+                        let mut dec = new_h264_hwdec();
+                        let dsupp = Box::new(NADecoderSupport::new());
+                        let props = info.get_properties().get_video_info().unwrap();
+                        // prefer dimensions recorded in the stream properties
+                        // when present (non-zero)
+                        if props.get_width() != 0 {
+                            width = props.get_width();
+                            height = props.get_height();
+                        }
+                        if dec.init(info.clone()).is_err() {
+                            println!("failed to initialise hwaccel video decoder");
+                        } else {
+                            video_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::VideoHW(dec) });
+                            self.video_str = str_id;
+                            let (tbn, tbd) = s.get_timebase();
+                            tb_num = tbn;
+                            tb_den = tbd;
+                            self.has_video = true;
+                            println!(" using hardware-accelerated decoding");
+                            // this stream is fully configured, move to the next one
+                            continue;
+                        }
+                    }
if let Some(decfunc) = decfunc_mt {
let mut dec = (decfunc)();
let mut dsupp = Box::new(NADecoderSupport::new());
dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth);
dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
- dec.init(&mut dsupp, info).unwrap();
+ if dec.init(&mut dsupp, info).is_err() {
+ println!("failed to initialise video decoder");
+ return window;
+ }
video_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::Video(dec, reord) });
self.video_str = str_id;
let (tbn, tbd) = s.get_timebase();
let mut dec = (decfunc)();
let mut dsupp = Box::new(NADecoderSupport::new());
ainfo = info.get_properties().get_audio_info();
- dec.init(&mut dsupp, info).unwrap();
+ if let (true, Some(ref ai)) = (info.get_name() == "aac", ainfo) {
+ if ai.sample_rate < 32000 {
+ sbr_hack = true;
+ }
+ }
+ if dec.init(&mut dsupp, info).is_err() {
+ println!("failed to initialise audio decoder");
+ return window;
+ }
audio_dec = Some(DecoderStuff{ dsupp, dec: DecoderType::Audio(dec) });
self.audio_str = str_id;
self.has_audio = true;
}
if !self.has_video && !self.has_audio {
println!("No playable streams found.");
- return;
+ return window;
}
- while (width <= 384) && (height <= 288) {
- width <<= 1;
- height <<= 1;
- }
+ match self.sc_size {
+ ScaleSize::Auto => {
+ while (width <= 384) && (height <= 288) {
+ width <<= 1;
+ height <<= 1;
+ }
+ },
+ ScaleSize::Times(factor) => {
+ let nw = ((width as f32) * factor).ceil() as usize;
+ let nh = ((height as f32) * factor).ceil() as usize;
+ if nw > 0 && nh > 0 {
+ width = nw;
+ height = nh;
+ }
+ },
+ ScaleSize::Fixed(w, h) => {
+ width = w;
+ height = h;
+ },
+ };
// prepare playback structure
let mut new_vcontrol = VideoControl::new(video_dec, width, height, tb_num, tb_den);
std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);
- let mut new_acontrol = AudioControl::new(audio_dec, ainfo, &self.asystem);
+ let mut new_acontrol = AudioControl::new(audio_dec, ainfo, sbr_hack, &self.asystem);
std::mem::swap(&mut self.acontrol, &mut new_acontrol);
if self.mute {
let fname = path.file_name();
let wname = if let Some(fname) = fname {
- "NihAV player - ".to_owned() + fname.to_str().unwrap()
+ // workaround for libSDL2 workaround for non-UTF8 windowing systems
+ // see https://github.com/libsdl-org/SDL/pull/4290 for details
+ let nname = fname.to_str().expect("should be able to set window title").replace('\u{2013}', "-").replace('\u{2014}', "-");
+ "NihAV player - ".to_owned() + &nname
} else {
"NihAV player".to_owned()
};
- let mut builder = self.vsystem.window(&wname, width as u32, height as u32);
- let window = if let (Some(xpos), Some(ypos)) = (self.xpos, self.ypos) {
- builder.position(xpos, ypos).build().unwrap()
- } else {
- builder.position_centered().build().unwrap()
- };
- let mut canvas = window.into_canvas().build().unwrap();
+ window.set_title(&wname).expect("set window title");
+ if window.size() != (width as u32, height as u32) {
+ window.set_size(width as u32, height as u32).expect("resize window");
+ }
+ window.show();
+ let mut canvas = window.into_canvas().build().expect("should be able to build canvas");
let texture_creator = canvas.texture_creator();
let mut disp_q = DispQueue::new(&texture_creator, width, height, if self.has_video { FRAME_QUEUE_LEN } else { 0 });
if !self.has_video {
canvas.clear();
- canvas.copy(disp_q.get_last_texture(&self.osd), None, None).unwrap();
+ canvas.copy(disp_q.get_last_texture(&self.osd), None, None).expect("blit failure");
canvas.present();
}
self.has_audio = self.acontrol.has_audio();
if !self.has_video && !self.has_audio {
println!("No playable streams.");
- return;
+ return canvas.into_window();
}
// play
new_vcontrol.finish();
std::mem::swap(&mut self.acontrol, &mut new_acontrol);
new_acontrol.finish();
- return;
+ return canvas.into_window();
}
self.tkeep.reset_all(if !disp_q.is_empty() { disp_q.first_ts } else { 0 });
if !self.paused {
self.acontrol.resume();
}
- let mut event_pump = self.sdl_context.event_pump().unwrap();
+ let mut event_pump = self.sdl_context.event_pump().expect("should be able to create event pump");
let mut last_disp = Instant::now();
let mut has_data = true;
'main: loop {
thread::sleep(Duration::from_millis(20));
}
}
- let (xpos, ypos) = canvas.into_window().position();
- self.xpos = Some(xpos);
- self.ypos = Some(ypos);
println!();
std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);
new_vcontrol.finish();
std::mem::swap(&mut self.acontrol, &mut new_acontrol);
new_acontrol.finish();
+ canvas.into_window()
}
}
}
let mut player = Player::new();
+ let mut builder = player.vsystem.window("NihAV Player", 640, 480);
+ let mut window = builder.position_centered().hidden().build().expect("should be able to centre window");
let mut aiter = args.iter().skip(1);
let mut seek_time = NATimePoint::None;
"-ae" => { player.play_audio = true; },
"-vn" => { player.play_video = false; },
"-ve" => { player.play_video = true; },
- "-seek" => {
+ "-seek" | "-start" => {
if let Some(arg) = aiter.next() {
if let Ok(time) = arg.parse::<NATimePoint>() {
seek_time = time;
"-nomt" => {
player.use_mt = false;
},
+ #[cfg(feature="hwaccel")]
+ "-hwaccel" => {
+ player.use_hwaccel = true;
+ },
+ #[cfg(feature="hwaccel")]
+ "-nohwaccel" => {
+ player.use_hwaccel = false;
+ },
"-threads" => {
if let Some(arg) = aiter.next() {
if let Ok(val) = arg.parse::<usize>() {
}
}
},
+ "-scale" => {
+ if let Some(arg) = aiter.next() {
+ if let Ok(ssize) = arg.parse::<ScaleSize>() {
+ player.sc_size = ssize;
+ } else {
+ println!("invalid scale size");
+ }
+ }
+ },
_ => {
- player.play(arg, seek_time);
+ window = player.play(window, arg, seek_time);
if player.end { break; }
seek_time = NATimePoint::None;
},