}
}
}
- let time = NATimeInfo::ts_to_time(pts, 1000, tb_nums[stream_no], tb_dens[stream_no]);
+ let time = NATimeInfo::rescale_ts(pts, tb_nums[stream_no], tb_dens[stream_no], 1, 1000);
for avi_str in avi_streams.iter_mut() {
if avi_str.strm_no == (stream_no as u8) {
if stream.get_media_type() == StreamType::Video {
let (tb_num, tb_den) = stream.get_timebase();
let pts = counter[stream_no];
- let time = NATimeInfo::ts_to_time(pts, 1000, tb_num, tb_den);
+ let time = NATimeInfo::rescale_ts(pts, tb_num, tb_den, 1, 1000);
validate!(offset >= movi_pos);
seek_idx.add_entry(stream_no as u32, SeekEntry { time, pts, pos: offset });
}
let _size = src.read_u32le()?;
let pts = start + (i as u64);
- let time = NATimeInfo::ts_to_time(pts, 1000, tb_num, tb_den);
+ let time = NATimeInfo::rescale_ts(pts, tb_num, tb_den, 1, 1000);
seek_idx.add_entry(stream_no as u32, SeekEntry { time, pts, pos: base_offset + u64::from(offset - 8) });
}
let mut tsearch = TimeSearcher::new();
for kf_time in self.keyframes.iter() {
let pts = tsearch.map_time(*kf_time - 1, &self.time_to_sample);
- let time = NATimeInfo::ts_to_time(pts, 1000, self.tb_num, self.tb_den);
+ let time = NATimeInfo::rescale_ts(pts, self.tb_num, self.tb_den, 1, 1000);
seek_index.add_entry(self.track_no, SeekEntry { time, pts: u64::from(*kf_time - 1), pos: 0 });
}
}
self.cur_ts = None;
if self.stream_type == StreamType::Audio {
if let NATimePoint::Milliseconds(ms) = tpoint {
- let exp_pts = NATimeInfo::time_to_ts(ms, 1000, self.tb_num, self.tb_den);
+ let exp_pts = NATimeInfo::rescale_ts(ms, 1, 1000, self.tb_num, self.tb_den);
if self.raw_audio {
if self.frame_samples != 0 {
self.raw_apos = exp_pts / (self.frame_samples as u64);
self.cur_chunk += 1;
}
let cur_pts = self.timesearch.map_time(self.cur_sample as u32, &self.time_to_sample);
- let cur_time = NATimeInfo::ts_to_time(cur_pts, 1000, self.tb_num, self.tb_den);
+ let cur_time = NATimeInfo::rescale_ts(cur_pts, self.tb_num, self.tb_den, 1, 1000);
Ok(cur_time)
}
}
fn process_packet(src: &mut dyn ByteIO, strmgr: &StreamManager, track: &mut Track, pts: NATimeInfo, offset: u64, size: usize, first: bool) -> DemuxerResult<NAPacket> {
if let Some(cpts) = pts.get_pts() {
- let ts = NATimeInfo::ts_to_time(cpts, 1000, pts.tb_num, pts.tb_den);
+ let ts = NATimeInfo::rescale_ts(cpts, pts.tb_num, pts.tb_den, 1, 1000);
track.cur_ts = Some(ts);
} else {
track.cur_ts = None;
if vtime.max(atime) - vtime.min(atime) > 500 && atime != 0 {
for track in self.tracks.iter_mut() {
if track.stream_type == StreamType::Audio {
- let new_pts = NATimeInfo::time_to_ts(vtime, 1000, track.tb_num, track.tb_den);
+ let new_pts = NATimeInfo::rescale_ts(vtime, 1, 1000, track.tb_num, track.tb_den);
track.seek(new_pts, NATimePoint::Milliseconds(vtime))?;
}
}
if (self.fps_num == 0) || (self.fps_den == 0) {
return Err(DemuxerError::SeekError);
}
- NATimeInfo::time_to_ts(ms, 1000, self.fps_num, self.fps_den)
+ NATimeInfo::rescale_ts(ms, 1, 1000, self.fps_num, self.fps_den)
},
NATimePoint::None => return Err(DemuxerError::SeekError),
};
let hdrl_pos = self.bw.tell() + 20;
self.bw.write_buf(b"RIFF\0\0\0\0AVI LIST\0\0\0\0hdrlavih")?;
self.bw.write_u32le(56)?; // avih size
- let ms_per_frame = NATimeInfo::ts_to_time(1, 1000000, tb_num, tb_den);
+ let ms_per_frame = NATimeInfo::rescale_ts(1, tb_num, tb_den, 1, 1000000); // frame duration in microseconds (avih dwMicroSecPerFrame)
self.bw.write_u32le(ms_per_frame as u32)?;
self.bw.write_u32le(0)?; // max transfer rate
self.bw.write_u32le(0)?; // padding granularity
if !self.single {
let vstr = strmgr.get_stream(0).unwrap();
- let delay = NATimeInfo::ts_to_time(1, 100, vstr.tb_num, vstr.tb_den) as u16;
+ let delay = NATimeInfo::rescale_ts(1, vstr.tb_num, vstr.tb_den, 1, 100) as u16; // GIF frame delay is measured in 1/100 s
self.bw.write_byte(0x21)?; // graphic control
self.bw.write_byte(0xF9)?; // graphic control extension
self.bw.write_byte(4)?; // block size
let mut duration = 0;
for stream in self.streams.iter() {
if stream.duration > 0 {
- let dur = NATimeInfo::ts_to_time(stream.duration, 1000, stream.tb_num, stream.tb_den);
+ let dur = NATimeInfo::rescale_ts(stream.duration, stream.tb_num, stream.tb_den, 1, 1000);
if duration < dur {
duration = dur;
}
let mut duration = 0;
for stream in self.streams.iter() {
if stream.duration > 0 {
- let dur = NATimeInfo::ts_to_time(stream.duration, 1000, stream.tb_num, stream.tb_den);
+ let dur = NATimeInfo::rescale_ts(stream.duration, stream.tb_num, stream.tb_den, 1, 1000);
if duration < dur {
duration = dur;
}
/// Sets new duration.
pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }
- /// Converts time in given scale into timestamp in given base.
- #[allow(clippy::collapsible_if)]
- #[allow(clippy::collapsible_else_if)]
- pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
- let tb_num = u64::from(tb_num);
- let tb_den = u64::from(tb_den);
- let tmp = time.checked_mul(tb_den);
- if let Some(tmp) = tmp {
- tmp / base / tb_num
- } else {
- if tb_num < base {
- let coarse = time / tb_num;
- if let Some(tmp) = coarse.checked_mul(tb_den) {
- tmp / base
- } else {
- (coarse / base) * tb_den
- }
- } else {
- let coarse = time / base;
- if let Some(tmp) = coarse.checked_mul(tb_den) {
- tmp / tb_num
- } else {
- (coarse / tb_num) * tb_den
- }
- }
- }
- }
- /// Converts timestamp in given base into time in given scale.
- pub fn ts_to_time(ts: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
- let tb_num = u64::from(tb_num);
- let tb_den = u64::from(tb_den);
- let tmp = ts.checked_mul(base);
- if let Some(tmp) = tmp {
- let tmp2 = tmp.checked_mul(tb_num);
- if let Some(tmp2) = tmp2 {
- tmp2 / tb_den
- } else {
- (tmp / tb_den) * tb_num
- }
- } else {
- let tmp = ts.checked_mul(tb_num);
- if let Some(tmp) = tmp {
- (tmp / tb_den) * base
- } else {
- (ts / tb_den) * base * tb_num
+ /// Converts a timestamp from one time base into another.
+ ///
+ /// For instance, converting a timestamp from a 2/25 time base to milliseconds can be done as
+ /// ```
+ /// use nihav_core::frame::NATimeInfo;
+ ///
+ /// let pts = 42;
+ /// let time = NATimeInfo::rescale_ts(pts, 2, 25, 1, 1000);
+ /// assert_eq!(time, 3360);
+ /// ```
+ ///
+ /// while the reverse conversion is equally simple:
+ /// ```
+ /// use nihav_core::frame::NATimeInfo;
+ ///
+ /// let millis = 42;
+ /// let ts = NATimeInfo::rescale_ts(millis, 1, 1000, 2, 25);
+ /// ```
+ pub fn rescale_ts(ts: u64, src_tb_num: u32, src_tb_den: u32, dst_tb_num: u32, dst_tb_den: u32) -> u64 {
+ let src_tb_num = u64::from(src_tb_num);
+ let src_tb_den = u64::from(src_tb_den);
+ let dst_tb_num = u64::from(dst_tb_num);
+ let dst_tb_den = u64::from(dst_tb_den);
+
+ // ts * (src_tb_num / src_tb_den) / (dst_tb_num / dst_tb_den)
+ // reordered to retain the maximum precision, so multiplications go first
+ if let Some(nval) = ts.checked_mul(dst_tb_den) {
+ if let Some(nnval) = nval.checked_mul(src_tb_num) {
+ nnval / src_tb_den / dst_tb_num
+ } else { // intermediate result is too large, use coarse approximation
+ (nval / dst_tb_num).saturating_mul(src_tb_num) / src_tb_den
}
+ } else { // intermediate result is too large, use coarse approximation
+ (ts.saturating_mul(src_tb_num) / src_tb_den).saturating_mul(dst_tb_den) / dst_tb_num
}
}
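Every other hunk in this change follows the same substitution pattern: the old `base` scale of `ts_to_time()`/`time_to_ts()` (e.g. 1000 for milliseconds) becomes a `1/base` time base on the destination or source side of `rescale_ts()`. A minimal round-trip sketch, assuming the patched `nihav_core` crate (with `rescale_ts()` applied) is available:
```
use nihav_core::frame::NATimeInfo;

fn main() {
    // hypothetical 25 fps video time base: each tick is 1/25 of a second
    let (tb_num, tb_den) = (1, 25);
    let pts = 50; // 50 frames = 2000 ms

    // old form: NATimeInfo::ts_to_time(pts, 1000, tb_num, tb_den)
    let ms = NATimeInfo::rescale_ts(pts, tb_num, tb_den, 1, 1000);
    assert_eq!(ms, 2000);

    // old form: NATimeInfo::time_to_ts(ms, 1000, tb_num, tb_den)
    let back = NATimeInfo::rescale_ts(ms, 1, 1000, tb_num, tb_den);
    assert_eq!(back, pts);
}
```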
fn get_cur_ts(&self) -> u64 { self.pts.unwrap_or_else(|| self.dts.unwrap_or(0)) }
fn get_cur_millis(&self) -> u64 {
let ts = self.get_cur_ts();
- Self::ts_to_time(ts, 1000, self.tb_num, self.tb_den)
+ Self::rescale_ts(ts, self.tb_num, self.tb_den, 1, 1000)
}
/// Checks whether the current time information is earlier than the provided reference time.
pub fn less_than(&self, time: NATimePoint) -> bool {
fn mux_frame(&mut self, _strmgr: &StreamManager, pkt: NAPacket) -> MuxerResult<()> {
let stream = pkt.get_stream();
let pts = pkt.get_dts().unwrap_or_else(|| pkt.get_pts().unwrap_or(0)); // decoding-order timestamp; keeps the composition offset below meaningful
- let ms = NATimeInfo::ts_to_time(pts, 1000, pkt.ts.tb_num, pkt.ts.tb_den) as u32;
+ let ms = NATimeInfo::rescale_ts(pts, pkt.ts.tb_num, pkt.ts.tb_den, 1, 1000) as u32;
self.time = self.time.max(ms);
match stream.get_media_type() {
StreamType::Video => {
},
AVC_ID => {
self.bw.write_byte(1)?;
- let cms = NATimeInfo::ts_to_time(pkt.get_pts().unwrap_or(pts), 1000, pkt.ts.tb_num, pkt.ts.tb_den) as u32;
+ let cms = NATimeInfo::rescale_ts(pkt.get_pts().unwrap_or(pts), pkt.ts.tb_num, pkt.ts.tb_den, 1, 1000) as u32;
let cts = cms.wrapping_sub(ms) << 8 >> 8;
self.bw.write_u24be(cts)?;
},
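For reference, a small sketch of the 24-bit truncation applied to `cts` above, using hypothetical millisecond values; a negative presentation-minus-decoding difference ends up in two's-complement form in the low 24 bits before `write_u24be()` emits it:
```
fn main() {
    let ms: u32 = 1000; // hypothetical decoding-order time, in ms
    let cms: u32 = 960; // hypothetical presentation time, in ms (earlier, to show the sign handling)
    // same expression as in the hunk above: keep only the low 24 bits of the difference
    let cts = cms.wrapping_sub(ms) << 8 >> 8;
    assert_eq!(cts, 0x00FF_FFD8); // -40 as a 24-bit two's-complement value
}
```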
self.src.seek(SeekFrom::Start(self.data_start + seek_info.pos))?;
Ok(())
} else if let NATimePoint::Milliseconds(ms) = time {
- let samppos = NATimeInfo::time_to_ts(ms, 1000, 1, self.srate);
+ let samppos = NATimeInfo::rescale_ts(ms, 1, 1000, 1, self.srate);
if self.known_frames.last().unwrap_or(&FrameSeekInfo::default()).sampleend >= samppos {
for point in self.known_frames.iter().rev() {
if point.samplepos <= samppos {
if &tag == b"dibc" {
let (tb_num, tb_den) = stream.get_timebase();
let pts = counter;
- let time = NATimeInfo::ts_to_time(pts, 1000, tb_num, tb_den);
+ let time = NATimeInfo::rescale_ts(pts, tb_num, tb_den, 1, 1000);
validate!(offset >= movi_pos);
seek_idx.add_entry(0, SeekEntry { time, pts, pos: offset });
key_offs.push(offset);
}
fn seek(&mut self, time: NATimePoint, seek_idx: &SeekIndex) -> DemuxerResult<()> {
let seek_ts = match time {
- NATimePoint::Milliseconds(ms) => NATimeInfo::time_to_ts(ms, 1000, self.tb_num, self.tb_den),
+ NATimePoint::Milliseconds(ms) => NATimeInfo::rescale_ts(ms, 1, 1000, self.tb_num, self.tb_den),
NATimePoint::PTS(ts) => ts,
_ => return Err(DemuxerError::SeekError),
};
let buf = frm.get_buffer();
let tinfo = frm.get_time_information();
- let pts = NATimeInfo::ts_to_time(tinfo.pts.unwrap_or(0), 1000, tinfo.tb_num, tinfo.tb_den);
+ let pts = NATimeInfo::rescale_ts(tinfo.pts.unwrap_or(0), tinfo.tb_num, tinfo.tb_den, 1, 1000);
let fpts = (pts & 0x1FFF) as u32;
let ts_diff = if ftype == FrameType::B {
fn write_packet(&mut self, bw: &mut dyn ByteIO, pkt: NAPacket, pkt_no: &mut u32) -> MuxerResult<()> {
if let Some(pts) = pkt.get_pts() {
let (tb_num, tb_den) = pkt.get_stream().get_timebase();
- let ms = NATimeInfo::ts_to_time(pts, 1000, tb_num, tb_den) as u32;
+ let ms = NATimeInfo::rescale_ts(pts, tb_num, tb_den, 1, 1000) as u32;
self.time = self.time.max(ms);
self.cur_time = ms;
}