let args: Vec<_> = env::args().collect();
if args.len() == 1 {
- println!("usage: nihav-tool [-noout] [-vn] [-an] input [lastpts]");
+ println!("usage: nihav-tool [-noout] [-vn] [-an] input [endtime]");
+ println!(" or invoke nihav-tool --help for more detailed information");
return;
}
- let mut lastpts: Option<u64> = None;
+ if args.len() == 2 && args[1] == "--help" {
+ println!("usage: nihav-tool [options] input [endtime]");
+ println!("available options:");
+ println!(" -noout - decode but do not write output");
+ println!(" -an - do not decode audio streams");
+ println!(" -vn - do not decode video streams");
+ println!(" -nm={{count,pktpts,frmpts}} - use counter/frame PTS/decoded PTS as output image number");
+ println!(" -skip={{key,intra}} - decode only reference frames (I-/P-) or intra frames only");
+ println!(" -seek time - try seeking to the given time before starting decoding");
+ println!(" -apfx/-vpfx prefix - use given prefix when creating output audio/video files instead of default 'out'");
+ println!(" -ignerr - keep decoding even if decoding error is encountered");
+ println!(" -dumpfrm - dump raw frame data for all streams");
+ println!(" endtime - decoding end time, can be given either as time (hh:mm:ss.ms) or as a timestamp (e.g. 42pts)");
+ return;
+ }
+ let mut lastpts = NATimePoint::None;
let mut cur_arg: usize = 1;
let mut noout = false;
let mut decode_video = true;
let mut decode_audio = true;
let mut nmode = NumberMode::FrmPTS;
let mut smode = FrameSkipMode::None;
- let mut seek_time = 0u64;
+ let mut seek_time = NATimePoint::None;
let mut vpfx: Option<String> = None;
let mut apfx: Option<&str> = None;
let mut ignore_errors = false;
println!("seek time missing");
return;
}
- let ret = args[cur_arg].parse::<u64>();
+ let ret = args[cur_arg].parse::<NATimePoint>();
if ret.is_err() {
println!("wrong seek time");
return;
let name = args[cur_arg].as_str();
cur_arg += 1;
if cur_arg < args.len() {
- lastpts = Some(u64::from_str_radix(args[cur_arg].as_str(), 10).unwrap());
+ let ret = args[cur_arg].parse::<NATimePoint>();
+ if ret.is_err() {
+ println!("cannot parse end time");
+ return;
+ }
+ lastpts = ret.unwrap();
}
let path = Path::new(name);
let mut dmx_reg = RegisteredDemuxers::new();
nihav_register_all_demuxers(&mut dmx_reg);
let mut dec_reg = RegisteredDecoders::new();
- nihav_register_all_codecs(&mut dec_reg);
+ nihav_register_all_decoders(&mut dec_reg);
dmx_fact = dmx_reg.find_demuxer(dmx_name).unwrap();
br.seek(SeekFrom::Start(0)).unwrap();
let mut dmx = create_demuxer(dmx_fact, &mut br).unwrap();
- if seek_time > 0 {
+ if seek_time != NATimePoint::None {
let ret = dmx.seek(seek_time);
if ret.is_err() {
println!(" seek error {:?}", ret.err().unwrap());
}
} else {
decs.push(None);
- panic!("decoder {} not found", info.get_name());
}
if !has_out {
writers.push(Outputter::None);
};
}
},
- Err(DecoderError::MissingReference) if seek_time > 0 => {
+ Err(DecoderError::MissingReference) if seek_time != NATimePoint::None => {
println!("ignoring missing ref");
},
Err(reason) => {
}
},
};
- if pkt.get_pts() != None && lastpts.is_some() && pkt.get_pts() >= lastpts { break; }
+ if pkt.get_pts() != None && lastpts != NATimePoint::None && !pkt.ts.less_than(lastpts) { break; }
}
}
//panic!("end");