extern crate nihav_allstuff;
use std::env;
+use std::collections::VecDeque;
use std::fs::File;
#[cfg(not(target_os = "windows"))]
use std::io::{BufRead, BufReader};
use videodec::*;
mod osd;
use osd::*;
+mod pool;
+use pool::*;
#[repr(u8)]
#[derive(Clone,Copy,Debug,PartialEq,Default)]
}
}
-pub struct DispFrame<'a> {
- pub ts: u64,
- pub is_yuv: bool,
- pub valid: bool,
- pub rgb_tex: Texture<'a>,
- pub yuv_tex: Texture<'a>,
-}
-
pub struct DispQueue<'a> {
- pub pool: Vec<DispFrame<'a>>,
+ // Pool of reusable SDL streaming textures (RGB and YUV) frames are loaded into.
+ pub pool: SDLPool<'a>,
+ // Frames ready for display, in presentation order (front = next to show).
+ pub queue: VecDeque<SDLFrame<'a>>,
pub first_ts: u64,
pub ts_valid: bool,
pub last_ts: u64,
- pub start: usize,
- pub end: usize,
pub len: usize,
pub width: usize,
pub height: usize,
pub osd_tex: Texture<'a>,
+ // All-black RGB texture used as a fallback when no frame texture is available.
+ pub empty_tex: Texture<'a>,
}
impl<'a> DispQueue<'a> {
fn new(texture_creator: &'a TextureCreator<WindowContext>, width: usize, height: usize, len: usize) -> Self {
- let mut pool = Vec::with_capacity(len);
- for _ in 0..len + 1 {
- let mut rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).expect("failed to create RGB texture");
- let mut yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).expect("failed to create YUV texture");
- rgb_tex.set_blend_mode(BlendMode::None);
- yuv_tex.set_blend_mode(BlendMode::None);
- pool.push(DispFrame{ ts: 0, is_yuv: false, valid: false, rgb_tex, yuv_tex });
- }
- pool[len].is_yuv = false;
- pool[len].rgb_tex.with_lock(None, |buffer: &mut [u8], _pitch: usize| {
+ // Frame texture allocation is delegated to SDLPool; this queue itself only
+ // owns the OSD overlay texture and the black fallback texture.
+ let pool = SDLPool::new(texture_creator, width, height, len);
+
+ let mut empty_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).expect("failed to create RGB texture");
+ // Clear the fallback texture to solid black once, up front.
+ empty_tex.with_lock(None, |buffer: &mut [u8], _pitch: usize| {
for el in buffer.iter_mut() { *el = 0; }
- }).expect("RGB texture could not be locked");
+ }).expect("surface should be locked");
let mut osd_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGBA8888, width as u32, OSD_HEIGHT as u32).expect("failed to create RGBA texture");
osd_tex.set_blend_mode(BlendMode::Blend);
- Self { pool, osd_tex, first_ts: 0, last_ts: 0, ts_valid: false, start: 0, end: 0, len, width, height }
+ Self { pool, osd_tex, empty_tex, first_ts: 0, last_ts: 0, ts_valid: false, len, width, height, queue: VecDeque::with_capacity(len) }
}
fn flush(&mut self) {
- self.start = 0;
- self.end = 0;
self.first_ts = 0;
self.last_ts = 0;
self.ts_valid = false;
- for frm in self.pool.iter_mut() {
- frm.valid = false;
+ // Drain all pending frames and hand their textures back to the pool for reuse.
+ for frm in self.queue.drain(..) {
+ self.pool.return_frame(frm);
}
}
fn get_last_texture(&mut self) -> &Texture<'a> {
- if self.pool[self.len].is_yuv {
- &self.pool[self.len].yuv_tex
- } else {
- &self.pool[self.len].rgb_tex
- }
- }
- pub fn is_empty(&self) -> bool { self.start == self.end }
- pub fn is_full(&self) -> bool { self.len == 0 || self.start == (self.end + 1) % self.len }
- pub fn move_end(&mut self) {
- self.end += 1;
- if self.end >= self.len {
- self.end -= self.len;
- }
+ // NOTE(review): the last displayed frame is no longer cached (the old code
+ // kept it in pool[len]), so a redraw via this path now shows the black
+ // fallback texture instead of the previous picture -- confirm intended.
+ &self.empty_tex
}
+ // Queue bookkeeping now lives in the VecDeque; "full" means `len` frames queued.
+ pub fn is_empty(&self) -> bool { self.queue.is_empty() }
+ pub fn is_full(&self) -> bool { self.queue.len() >= self.len }
pub fn move_start(&mut self) {
- self.pool.swap(self.start, self.len);
- self.start += 1;
- if self.start >= self.len {
- self.start -= self.len;
+ // Pop the just-displayed frame and return its texture to the pool.
+ if let Some(frm) = self.queue.pop_front() {
+ self.pool.return_frame(frm);
}
- if !self.is_empty() {
- self.first_ts = self.pool[self.start].ts;
+ // Re-derive first_ts / ts_valid from the new queue head.
+ self.update();
+ }
+ pub fn update(&mut self) {
+ if let Some(frm) = self.queue.front() {
+ self.first_ts = frm.time;
} else {
self.ts_valid = false;
}
if osd.is_active() {
osd.prepare(ctime);
}
- let frm = &mut disp_queue.pool[disp_queue.start];
- let texture = if frm.is_yuv {
- &frm.yuv_tex
- } else {
- &frm.rgb_tex
- };
+ let frm = disp_queue.queue.pop_front().unwrap();
canvas.clear();
- canvas.copy(texture, None, None).expect("canvas blit failure");
+ canvas.copy(&frm.tex, frm.rect, None).expect("canvas blit failure");
if osd.is_active() {
draw_osd(disp_queue, canvas, osd);
}
canvas.present();
- disp_queue.move_start();
+ disp_queue.pool.return_frame(frm);
+ disp_queue.update();
if !disp_queue.is_empty() {
return Some(disp_queue.first_ts.saturating_sub(ctime).saturating_sub(2));
} else {
force_astr: Option<u32>,
sc_size: ScaleSize,
sc_dbg: bool,
+ no_sdl_scale: bool,
force_dmx: Option<String>,
vthreads: usize,
force_astr: None,
sc_size: ScaleSize::Auto,
sc_dbg: false,
+ no_sdl_scale: false,
force_dmx: None,
vthreads: 3,
}
}
+ let mut dec_w = width;
+ let mut dec_h = height;
match self.sc_size {
ScaleSize::Auto if self.thr_w > 0 && self.thr_h > 0 => {
while (width <= self.thr_w) && (height <= self.thr_h) {
height = h;
},
};
+ if self.no_sdl_scale {
+ dec_w = width;
+ dec_h = height;
+ }
// prepare playback structure
- let mut new_vcontrol = VideoControl::new(video_dec, VideoParams { width, height, tb_num, tb_den }, self.sc_dbg);
+ let mut new_vcontrol = VideoControl::new(video_dec, VideoParams { width: dec_w, height: dec_h, tb_num, tb_den }, self.sc_dbg);
std::mem::swap(&mut self.vcontrol, &mut new_vcontrol);
let mut new_acontrol = AudioControl::new(audio_dec, ainfo, sbr_hack, &self.asystem);
new_acontrol.finish();
return canvas.into_window();
}
- self.tkeep.reset_all(if !disp_q.is_empty() { disp_q.first_ts } else { 0 });
+ self.tkeep.reset_all(if !disp_q.queue.is_empty() { disp_q.first_ts } else { 0 });
if !self.paused {
self.acontrol.resume();
}
if !self.debug {
print!(" {} {}% \r", format_time(c_time), self.acontrol.get_volume());
} else {
- print!(" {} {} {}% {:3} {:6}\r", format_time(c_time), if self.vcontrol.is_yuv() { 'Y' } else { 'R' }, self.acontrol.get_volume(), (disp_q.end + disp_q.len - disp_q.start) % disp_q.len, self.acontrol.get_fill());
+ print!(" {} {} {}% {:3} {:6}\r", format_time(c_time), if self.vcontrol.is_yuv() { 'Y' } else { 'R' }, self.acontrol.get_volume(), disp_q.queue.len(), self.acontrol.get_fill());
}
}
std::io::stdout().flush().unwrap();
"-scale-debug" => {
player.sc_dbg = true;
},
+ "-sdl-scale" => {
+ player.no_sdl_scale = false;
+ },
+ "-no-sdl-scale" => {
+ player.no_sdl_scale = true;
+ },
"-quiet" => {
player.quiet = true;
},
--- /dev/null
+use nihav_core::frame::*;
+use sdl2::pixels::PixelFormatEnum;
+use sdl2::rect::Rect;
+use sdl2::render::*;
+use sdl2::video::WindowContext;
+
+// A single displayable video frame: one SDL streaming texture plus metadata.
+pub struct SDLFrame<'r> {
+ // Slot index assigned when the pool was created.
+ pub id: u32,
+ // Presentation timestamp of the currently loaded picture.
+ pub time: u64,
+ // Region of the texture actually covered by picture data.
+ pub rect: Rect,
+ pub tex: Texture<'r>,
+ // True if `tex` is the IYUV texture, false if it is the RGB24 one.
+ pub yuv: bool,
+}
+
+impl<'r> SDLFrame<'r> {
+ // Upload a planar YUV picture from a nihav buffer into the IYUV texture,
+ // updating `rect` to the (even-rounded) picture dimensions.
+ pub fn load_frame_yuv(&mut self, buf: NAVideoBufferRef<u8>) {
+ let (width, height) = buf.get_dimensions(0);
+ // IYUV requires even dimensions; round height up to the next multiple of 2.
+ let aheight = (height + 1) & !1;
+ let src = buf.get_data();
+ let ysstride = buf.get_stride(0);
+ let ysrc = &src[buf.get_offset(0)..][..ysstride * aheight];
+ let usstride = buf.get_stride(2);
+ let usrc = &src[buf.get_offset(2)..][..usstride * (aheight / 2)];
+ let vsstride = buf.get_stride(1);
+ let vsrc = &src[buf.get_offset(1)..][..vsstride * (aheight / 2)];
+ self.rect = Rect::new(0, 0, (width as u32 + 1) & !1, aheight as u32);
+ // NOTE(review): plane 1 (named vsrc here) is passed in update_yuv's U slot
+ // and plane 2 (usrc) in its V slot; this matches the output_yuv() code this
+ // replaces -- confirm against nihav's plane ordering for YUV formats.
+ self.tex.update_yuv(self.rect, ysrc, ysstride, vsrc, vsstride, usrc, usstride)
+ .expect("YUV surface updated");
+ }
+ // Upload a packed RGB24 picture into the RGB texture and update `rect`.
+ pub fn load_frame_rgb(&mut self, buf: NAVideoBufferRef<u8>) {
+ let (width, height) = buf.get_dimensions(0);
+ self.rect = Rect::new(0, 0, width as u32, height as u32);
+ let sstride = buf.get_stride(0);
+ let src = buf.get_data();
+ self.tex.update(self.rect, src, sstride).expect("updated");
+ }
+}
+
+// Fixed-size pools of pre-allocated streaming textures, one pool per pixel format,
+// so frame display never allocates textures at runtime.
+pub struct SDLPool<'r> {
+ // Free IYUV frames available for decoding into.
+ pub yuv_pool: Vec<SDLFrame<'r>>,
+ // Free RGB24 frames available for decoding into.
+ pub rgb_pool: Vec<SDLFrame<'r>>,
+}
+
+impl<'r> SDLPool<'r> {
+ // Pre-allocate `len` RGB and `len` YUV streaming textures of the given size.
+ pub fn new(texture_creator: &'r TextureCreator<WindowContext>, width: usize, height: usize, len: usize) -> Self {
+ let mut yuv_pool = Vec::with_capacity(len);
+ let mut rgb_pool = Vec::with_capacity(len);
+ let rect = Rect::new(0, 0, width as u32, height as u32);
+ for i in 0..len {
+ let mut rgb_tex = texture_creator.create_texture_streaming(PixelFormatEnum::RGB24, width as u32, height as u32).expect("failed to create RGB texture");
+ // IYUV textures need even dimensions, hence the round-up.
+ let mut yuv_tex = texture_creator.create_texture_streaming(PixelFormatEnum::IYUV, ((width + 1) & !1) as u32, ((height + 1) & !1) as u32).expect("failed to create YUV texture");
+ rgb_tex.set_blend_mode(BlendMode::None);
+ //rgb_tex.set_scale_mode(ScaleMode::Nearest);
+ yuv_tex.set_blend_mode(BlendMode::None);
+ rgb_pool.push(SDLFrame{ id: i as u32, time: 0, rect, tex: rgb_tex, yuv: false });
+ yuv_pool.push(SDLFrame{ id: i as u32, time: 0, rect, tex: yuv_tex, yuv: true });
+ }
+ Self { yuv_pool, rgb_pool }
+ }
+ // True when EITHER sub-pool is exhausted, i.e. a frame of some format may be
+ // unavailable; the fill loop uses this as its stop condition.
+ pub fn is_empty(&self) -> bool { self.yuv_pool.is_empty() || self.rgb_pool.is_empty() }
+ // Take a free frame of the requested format, if one is available.
+ pub fn get_frame(&mut self, yuv: bool) -> Option<SDLFrame<'r>> {
+ if yuv { self.yuv_pool.pop() } else { self.rgb_pool.pop() }
+ }
+ // Put a frame back into the pool matching its pixel format.
+ pub fn return_frame(&mut self, frm: SDLFrame<'r>) {
+ if frm.yuv {
+ self.yuv_pool.push(frm);
+ } else {
+ self.rgb_pool.push(frm);
+ }
+ }
+}
use std::sync::mpsc::{Receiver, SyncSender, TrySendError};
use std::thread;
-use sdl2::render::Texture;
-
-use nihav_core::frame::{NABufferType, NAVideoBuffer};
+use nihav_core::frame::NABufferType;
use nihav_core::formats::*;
use nihav_core::codecs::*;
use nihav_core::scale::*;
}
}
-fn output_yuv(yuv_texture: &mut Texture, buf: &NAVideoBuffer<u8>, width: usize, height: usize) {
- let aheight = (height + 1) & !1;
- let src = buf.get_data();
- let ysstride = buf.get_stride(0);
- let ysrc = &src[buf.get_offset(0)..][..ysstride * aheight];
- let usstride = buf.get_stride(2);
- let usrc = &src[buf.get_offset(2)..][..usstride * (aheight / 2)];
- let vsstride = buf.get_stride(1);
- let vsrc = &src[buf.get_offset(1)..][..vsstride * (aheight / 2)];
- let rect = Some(sdl2::rect::Rect::new(0, 0, (width as u32 + 1) & !1, aheight as u32));
- yuv_texture.update_yuv(rect, ysrc, ysstride, vsrc, vsstride, usrc, usstride)
- .expect("YUV surface updated");
-}
-
pub struct VideoControl {
vqueue: Vec<PktSendEvent>,
pub fn is_yuv(&self) -> bool { self.do_yuv }
pub fn fill(&mut self, disp_queue: &mut DispQueue) {
- while !disp_queue.is_full() {
+ while !disp_queue.pool.is_empty() {
if let Ok((pic, time)) = self.vfrecv.try_recv() {
let buf = pic.get_vbuf().expect("video frame should be of u8 type");
self.do_yuv = buf.get_info().get_format().get_model().is_yuv();
- let idx = disp_queue.end;
- disp_queue.move_end();
- let frm = &mut disp_queue.pool[idx];
- if !self.do_yuv {
- let sstride = buf.get_stride(0);
- let src = buf.get_data();
- frm.rgb_tex.with_lock(None, |buffer: &mut [u8], pitch: usize| {
- let csize = sstride.min(pitch);
- for (dst, src) in buffer.chunks_mut(pitch).zip(src.chunks(sstride)) {
- dst[..csize].copy_from_slice(&src[..csize]);
- }
- true
- }).expect("surface should be locked");
+ let mut frm = disp_queue.pool.get_frame(self.do_yuv).unwrap();
+ if frm.yuv {
+ frm.load_frame_yuv(buf);
} else {
- output_yuv(&mut frm.yuv_tex, &buf, disp_queue.width, disp_queue.height);
+ frm.load_frame_rgb(buf);
}
- frm.valid = true;
- frm.is_yuv = self.do_yuv;
- frm.ts = time;
+ frm.time = time;
if !disp_queue.ts_valid {
disp_queue.first_ts = time;
disp_queue.ts_valid = true;
}
+ disp_queue.queue.push_back(frm);
disp_queue.last_ts = time;
} else {
break;