blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
}
fn copy_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_AVG_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
}
fn avg_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
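The hunks above all follow the same pattern: `src` is a reference-counted buffer handle, so every call that is followed by another use still needs `.clone()`, and only the final `copy_block` in each function can take ownership of the handle. A minimal sketch of the idea, with a hypothetical `consume` standing in for `copy_block`:

    fn consume(_buf: NAVideoBufferRef<u8>) {}

    fn example(src: NAVideoBufferRef<u8>) {
        consume(src.clone()); // luma plane: `src` is used again below, so clone
        consume(src.clone()); // first chroma plane: still one more use left
        consume(src);         // second chroma plane: last use, move instead of clone
    }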
}
Ok(())
}
+ #[allow(clippy::comparison_chain)]
fn decode_intra_mb_pred_quant(&mut self, bd: &mut dyn BlockDecoder, bdsp: &dyn BlockDSP, mb_pos: usize, binfo: &BlockInfo, sstate: &SliceState, apply_acpred: bool) -> DecoderResult<()> {
for i in 0..6 {
bd.decode_block_intra(&binfo, &sstate, binfo.get_q(), i, (binfo.cbp & (1 << (5 - i))) != 0, &mut self.blk[i])?;
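`clippy::comparison_chain` fires on `if`/`else if` chains that compare the same pair of values with `>`, `<` and `==`, and suggests a single `match` on `Ordering` instead; the commit opts to silence the lint rather than restructure the decoder. For reference, the shape clippy would suggest (a generic sketch, not the decoder's actual code):

    use std::cmp::Ordering;

    match a.cmp(&b) {
        Ordering::Greater => { /* a > b branch */ }
        Ordering::Less    => { /* a < b branch */ }
        Ordering::Equal   => { /* a == b branch */ }
    }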
bi.mv_f[blk_no]
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn reconstruct_obmc(&mut self, buf: &mut NAVideoBuffer<u8>, slice_start: usize, start: usize, end: usize, slice_end: bool) -> usize {
let mut mb_x = start % self.mb_w;
let mut mb_y = start / self.mb_w;
}
mb_pos
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
pub fn parse_frame(&mut self, bd: &mut dyn BlockDecoder, bdsp: &dyn BlockDSP) -> DecoderResult<NABufferType> {
let pinfo = bd.decode_pichdr()?;
let mut mvi = MVInfo::new();
clu1.calc_dist();
clu0.dist + clu1.dist
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
pub fn quantise(&mut self, src: &[T], dst: &mut [T]) -> usize {
if src.is_empty() || dst.len() != self.clusters.len() {
return 0;
let scale_cb = Codebook::new(&mut coderead, CodebookMode::MSB).unwrap();
let mut spec_cb: [Codebook<u16>; 11];
unsafe {
- spec_cb = mem::uninitialized();
+ spec_cb = mem::MaybeUninit::uninit().assume_init();
for i in 0..AAC_SPEC_CODES.len() {
let mut coderead = TableCodebookDescReader::new(AAC_SPEC_CODES[i], AAC_SPEC_BITS[i], cb_map);
ptr::write(&mut spec_cb[i], Codebook::new(&mut coderead, CodebookMode::MSB).unwrap());
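Note that the one-for-one `MaybeUninit::uninit().assume_init()` translation only silences the deprecation warning; the array of `Codebook`s is still treated as initialised before the `ptr::write` loop fills it, so that loop remains load-bearing. A fully safe alternative, sketched under the assumption of Rust 1.63+ and the same `TableCodebookDescReader`/`Codebook` API, would build the array directly (the same idea applies to the VP3x and RV60 codebook setup further down):

    let spec_cb: [Codebook<u16>; 11] = std::array::from_fn(|i| {
        let mut coderead = TableCodebookDescReader::new(AAC_SPEC_CODES[i], AAC_SPEC_BITS[i], cb_map);
        Codebook::new(&mut coderead, CodebookMode::MSB).unwrap()
    });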
tmp: [0.0; 2048], ew_buf: [0.0; 1152],
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn synth(&mut self, coeffs: &[f32; 1024], delay: &mut [f32; 1024], seq: u8, window_shape: bool, prev_window_shape: bool, dst: &mut [f32]) {
let long_win = if window_shape { &self.kbd_long_win } else { &self.sine_long_win };
let short_win = if window_shape { &self.kbd_short_win } else { &self.sine_short_win };
/// Registers all available codecs provided by this crate.
pub fn generic_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in DECODERS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
/// Registers all available encoders provided by this crate.
pub fn generic_register_all_encoders(re: &mut RegisteredEncoders) {
for encoder in ENCODERS.iter() {
- re.add_encoder(encoder.clone());
+ re.add_encoder(*encoder);
}
}
bap_buf_fill: [0; 3],
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn read(&mut self, br: &mut BitReader, bsi: &BSI, fscod: usize, blk_no: usize) -> DecoderResult<bool> {
let channels = bsi.acmod.get_num_channels();
let is_stereo = bsi.acmod == ACMode::Stereo;
fn reset(&mut self) {
*self = Self::default();
}
- fn map_time(&mut self, sample: u32, tts: &Vec<(u32, u32)>) -> u64 {
+ fn map_time(&mut self, sample: u32, tts: &[(u32, u32)]) -> u64 {
if tts.is_empty() {
u64::from(sample)
} else if sample >= self.sbase {
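The `&Vec<(u32, u32)>` → `&[(u32, u32)]` change (clippy's `ptr_arg` lint) is source-compatible for callers, since a `&Vec<T>` argument coerces to `&[T]` automatically. Illustrative call, with `sample` and the table being hypothetical locals:

    let tts: Vec<(u32, u32)> = vec![(10, 1024)];
    let _pts = self.map_time(sample, &tts); // &Vec<_> coerces to &[_]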
self.bsize
}
}
+ #[allow(clippy::collapsible_if)]
fn seek(&mut self, pts: u64, tpoint: NATimePoint) -> DemuxerResult<()> {
self.cur_sample = pts as usize;
self.samples_left = 0;
impl<'a> NAOptionHandler for MOVDemuxer<'a> {
fn get_supported_options(&self) -> &[NAOptionDefinition] { DEMUXER_OPTIONS }
+ #[allow(clippy::single_match)]
fn set_options(&mut self, options: &[NAOption]) {
for option in options.iter() {
for opt_def in DEMUXER_OPTIONS.iter() {
///!
///! [`DecompressError::ShortData`]: ../enum.DecompressError.html#variant.ShortData
///! [`DecompressError::OutputFull`]: ../enum.DecompressError.html#variant.OutputFull
+ #[allow(clippy::comparison_chain)]
pub fn decompress_data(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool) -> DecompressResult<usize> {
if src.is_empty() || dst.is_empty() {
return Err(DecompressError::InvalidArgument);
pub fn new() -> Self { Self::default() }
pub fn add_stream(&mut self, id: u32) -> usize {
let ret = self.stream_id_to_index(id);
- if ret.is_none() {
+ if let Some(res) = ret {
+ res
+ } else {
self.seek_info.push(StreamSeekInfo::new(id));
self.seek_info.len() - 1
- } else {
- ret.unwrap()
}
}
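An equivalent, arguably more compact form of the same refactor, assuming `stream_id_to_index` returns `Option<usize>` as above:

    pub fn add_stream(&mut self, id: u32) -> usize {
        self.stream_id_to_index(id).unwrap_or_else(|| {
            self.seek_info.push(StreamSeekInfo::new(id));
            self.seek_info.len() - 1
        })
    }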
/// Adds a new stream to the index.
impl fmt::Display for ColorModel {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
- ColorModel::RGB(fmt) => format!("RGB({})", fmt).to_string(),
- ColorModel::YUV(fmt) => format!("YUV({})", fmt).to_string(),
+ ColorModel::RGB(fmt) => format!("RGB({})", fmt),
+ ColorModel::YUV(fmt) => format!("YUV({})", fmt),
ColorModel::CMYK => "CMYK".to_string(),
ColorModel::HSV => "HSV".to_string(),
ColorModel::LAB => "LAB".to_string(),
}
ssamp
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
/// Returns a short string description of the format if possible.
pub fn to_short_string(&self) -> Option<String> {
match self.model {
impl FromStr for NAPixelFormaton {
type Err = FormatParseError;
+ #[allow(clippy::single_match)]
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"pal8" => return Ok(PAL8_FORMAT),
}
/// Creates a clone of current buffer.
-pub fn copy_buffer(buf: NABufferType) -> NABufferType {
+pub fn copy_buffer(buf: &NABufferType) -> NABufferType {
buf.clone()
}
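With the parameter now borrowed, call sites pass `&buf` instead of moving the buffer and can keep using it afterwards (illustrative only):

    let dup = copy_buffer(&buf);
    // `buf` is still available here.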
pub fn set_duration(&mut self, dur: Option<u64>) { self.duration = dur; }
/// Converts time in given scale into timestamp in given base.
+ #[allow(clippy::collapsible_if)]
pub fn time_to_ts(time: u64, base: u64, tb_num: u32, tb_den: u32) -> u64 {
let tb_num = u64::from(tb_num);
let tb_den = u64::from(tb_den);
pub type NAStreamRef = Arc<NAStream>;
/// Downscales the timebase by its greatest common divisor.
+#[allow(clippy::comparison_chain)]
pub fn reduce_timebase(tb_num: u32, tb_den: u32) -> (u32, u32) {
if tb_num == 0 { return (tb_num, tb_den); }
if (tb_den % tb_num) == 0 { return (1, tb_den / tb_num); }
clu1.calc_dist();
clu0.dist + clu1.dist
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
pub fn quantise(&mut self, src: &[Pixel], dst: &mut [[u8; 3]; 256]) {
if src.len() < 3 {
return;
}
}
}
+ #[allow(clippy::float_cmp)]
fn find_node(&mut self, clr: &[f64; 3]) -> usize {
for i in 0..SPECIAL_NODES {
if &self.weights[i] == clr {
fn new() -> Self { Self{} }
}
+#[allow(clippy::comparison_chain)]
fn scale_line<T:Copy>(src: &[T], dst: &mut [T], src_w: usize, dst_w: usize) {
if src_w == dst_w {
(&mut dst[..dst_w]).copy_from_slice(&src[..dst_w]);
/// Registers all available codecs provided by this crate.
pub fn duck_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in DUCK_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
}
Ok(())
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn synth_channel(&mut self, chno: usize, dst: &mut [f32]) {
let coeffs = &mut self.coeffs[chno];
let delay = &mut self.delay[chno];
#[allow(clippy::int_plus_one)]
#[allow(clippy::manual_memcpy)]
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_frame(&mut self, src: &[u8]) -> DecoderResult<()> {
let mut mr = MemoryReader::new_read(src);
let mut br = ByteReader::new(&mut mr);
let mut cr = TableCodebookDescReader::new(&VP30_MBTYPE_CODES, &VP30_MBTYPE_BITS, map_mbt);
let mbtype_cb = Codebook::new(&mut cr, CodebookMode::MSB).unwrap();
unsafe {
- dc_cb = mem::uninitialized();
- ac_i_cb = mem::uninitialized();
- ac_p_cb = mem::uninitialized();
+ dc_cb = mem::MaybeUninit::uninit().assume_init();
+ ac_i_cb = mem::MaybeUninit::uninit().assume_init();
+ ac_p_cb = mem::MaybeUninit::uninit().assume_init();
for i in 0..5 {
let mut cr = TableCodebookDescReader::new(&VP30_DC_CODES[i], &VP30_DC_BITS[i], map_idx);
let cb = Codebook::new(&mut cr, CodebookMode::MSB).unwrap();
let mut ac2_cb: [Codebook<u8>; 16];
let mut ac3_cb: [Codebook<u8>; 16];
unsafe {
- dc_cb = mem::uninitialized();
- ac0_cb = mem::uninitialized();
- ac1_cb = mem::uninitialized();
- ac2_cb = mem::uninitialized();
- ac3_cb = mem::uninitialized();
+ dc_cb = mem::MaybeUninit::uninit().assume_init();
+ ac0_cb = mem::MaybeUninit::uninit().assume_init();
+ ac1_cb = mem::MaybeUninit::uninit().assume_init();
+ ac2_cb = mem::MaybeUninit::uninit().assume_init();
+ ac3_cb = mem::MaybeUninit::uninit().assume_init();
for i in 0..16 {
let mut cr = TableCodebookDescReader::new(&VP31_DC_CODES[i], &VP31_DC_BITS[i], map_idx);
let cb = Codebook::new(&mut cr, CodebookMode::MSB).unwrap();
let mut ac2_cb: [Codebook<u8>; 16];
let mut ac3_cb: [Codebook<u8>; 16];
unsafe {
- dc_cb = mem::uninitialized();
- ac0_cb = mem::uninitialized();
- ac1_cb = mem::uninitialized();
- ac2_cb = mem::uninitialized();
- ac3_cb = mem::uninitialized();
+ dc_cb = mem::MaybeUninit::uninit().assume_init();
+ ac0_cb = mem::MaybeUninit::uninit().assume_init();
+ ac1_cb = mem::MaybeUninit::uninit().assume_init();
+ ac2_cb = mem::MaybeUninit::uninit().assume_init();
+ ac3_cb = mem::MaybeUninit::uninit().assume_init();
for i in 0..16 {
let mut cr = TableCodebookDescReader::new(&VP40_DC_CODES[i], &VP40_DC_BITS[i], map_idx);
let cb = Codebook::new(&mut cr, CodebookMode::MSB).unwrap();
let mut mv_x_cb: [Codebook<i8>; 7];
let mut mv_y_cb: [Codebook<i8>; 7];
unsafe {
- mv_x_cb = mem::uninitialized();
- mv_y_cb = mem::uninitialized();
+ mv_x_cb = mem::MaybeUninit::uninit().assume_init();
+ mv_y_cb = mem::MaybeUninit::uninit().assume_init();
for i in 0..7 {
let mut cr = TableCodebookDescReader::new(&VP40_MV_X_CODES[i], &VP40_MV_X_BITS[i], map_mv);
let cb = Codebook::new(&mut cr, CodebookMode::MSB).unwrap();
}
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn output_blocks_inter(&mut self, frm: &mut NASimpleVideoFrame<u8>) {
let mut blk_idx = 0;
let bstride = self.mb_w * 2;
}
Ok(self.last_mbt)
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_mb(&mut self, frm: &mut NASimpleVideoFrame<u8>, bc: &mut BoolCoder, cr: &mut CoeffReader, br: &mut dyn VP56Parser, hdr: &VP56Header, alpha: bool) -> DecoderResult<()> {
const FOURMV_SUB_TYPE: [VPMBType; 4] = [ VPMBType::InterNoMV, VPMBType::InterMV, VPMBType::InterNearest, VPMBType::InterNear ];
let x = self.fstate.mb_x * 8;
let y = self.fstate.mb_y * 8;
br.mc_block(frm, self.mc_buf.clone(), src.clone(), 1, x, y, mv, self.loop_thr);
- br.mc_block(frm, self.mc_buf.clone(), src.clone(), 2, x, y, mv, self.loop_thr);
+ br.mc_block(frm, self.mc_buf.clone(), src, 2, x, y, mv, self.loop_thr);
}
}
fn do_fourmv(&mut self, br: &dyn VP56Parser, frm: &mut NASimpleVideoFrame<u8>, mvs: &[MV; 4], alpha: bool) {
let sum = mvs[0] + mvs[1] + mvs[2] + mvs[3];
let mv = MV { x: sum.x / 4, y: sum.y / 4 };
br.mc_block(frm, self.mc_buf.clone(), src.clone(), 1, x, y, mv, self.loop_thr);
- br.mc_block(frm, self.mc_buf.clone(), src.clone(), 2, x, y, mv, self.loop_thr);
+ br.mc_block(frm, self.mc_buf.clone(), src, 2, x, y, mv, self.loop_thr);
}
}
fn predict_dc(&mut self, mb_type: VPMBType, _mb_pos: usize, blk_no: usize, _alpha: bool) {
(mv.x >> 3, mv.y >> 3, mv.x & 7, mv.y & 7, mv.x / 8, mv.y / 8)
};
let tmp_blk = mc_buf.get_data_mut().unwrap();
- get_block(tmp_blk, 16, src.clone(), plane, x, y, sx, sy);
+ get_block(tmp_blk, 16, src, plane, x, y, sx, sy);
if (msx & 7) != 0 {
let foff = (8 - (sx & 7)) as usize;
let off = 2 + foff;
if pitch_smode == 0 {
mc_block8x8(dst, uoff, ustride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe.clone(), 1, &mut mc_buf);
- mc_block8x8(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe.clone(), 2, &mut mc_buf);
+ mc_block8x8(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y, refframe, 2, &mut mc_buf);
} else {
mc_block_special(dst, uoff, ustride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y,
refframe.clone(), 1, &mut mc_buf, 8, pitch_smode);
mc_block_special(dst, voff, vstride, mb_x * 8, mb_y * 8, chroma_mv.x, chroma_mv.y,
- refframe.clone(), 2, &mut mc_buf, 8, pitch_smode);
+ refframe, 2, &mut mc_buf, 8, pitch_smode);
}
} else {
for y in 0..2 {
Err(DecoderError::InvalidData)
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode(&mut self, supp: &mut NADecoderSupport, pkt: &NAPacket) -> DecoderResult<NAFrameRef> {
let src = pkt.get_buffer();
extern crate nihav_codec_support;
#[allow(clippy::collapsible_if)]
+#[allow(clippy::comparison_chain)]
#[allow(clippy::excessive_precision)]
#[allow(clippy::identity_op)]
#[allow(clippy::unreadable_literal)]
/// Registers all available codecs provided by this crate.
pub fn game_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in GAME_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
}
}
#[allow(clippy::identity_op)]
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode(&mut self, _supp: &mut NADecoderSupport, pkt: &NAPacket) -> DecoderResult<NAFrameRef> {
let info = pkt.get_stream().get_info();
if let NACodecTypeInfo::Audio(_) = info.get_properties() {
self.skip_flag[i] = false;
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn calculate_bit_allocation(&mut self, ch_data: &mut IMCChannel, bits: usize, fixed_head: bool, adj_idx: usize) -> DecoderResult<()> {
let mut peak = 0.0;
weights2.copy_from_slice(&IMC_WEIGHTS2);
}
unsafe {
- codes = mem::uninitialized();
+ codes = mem::MaybeUninit::uninit().assume_init();
for i in 0..4 {
for j in 0..4 {
let mut cr = IMCCodeReader::new(i, j);
Ok((self.bbuf >> self.bpos) & 0x3)
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_cell_data(&mut self, br: &mut ByteReader, cell: IV3Cell,
off: usize, stride: usize, params: CellDecParams, vq_idx: u8) -> DecoderResult<()> {
let blk_w = cell.w * 4 / params.bw;
Ok(BandHeader::new(plane_no, band_no, self.mb_size[band_id], self.blk_size[band_id], self.is_hpel[band_id], inherit_mv, has_qdelta, inherit_qd, band_q, rvmap_idx, num_corr, corr_map, blk_cb, tr, txtype))
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_mb_info(&mut self, br: &mut BitReader, pic_hdr: &PictureHeader, band: &BandHeader, tile: &mut IVITile, ref_tile: Option<&IVITile>, mv_scale: u8) -> DecoderResult<()> {
let mut mv_x = 0;
let mut mv_y = 0;
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
}
fn copy_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_AVG_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
}
fn avg_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
let seq = br.peek(21);
if seq == 0xBFFF8 {
let res2 = self.decode_single_frame(dec, br);
- if res2.is_ok() {
- self.bref = Some(res2.unwrap());
+ if let Ok(res) = res2 {
+ self.bref = Some(res);
}
}
self.ftype = IVIFrameType::Intra;
/// Registers all available codecs provided by this crate.
pub fn indeo_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in INDEO_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
MV{ x, y }
}
-#[allow(clippy::cyclomatic_complexity)]
+#[allow(clippy::cognitive_complexity)]
pub fn decode_mb_pred_cabac(cabac: &mut CABAC, slice_hdr: &SliceHeader, mb_type: MBType, sstate: &mut SliceState, mb_info: &mut CurrentMBInfo) {
mb_info.mb_type = mb_type;
let num_l0 = slice_hdr.num_ref_idx_l0_active;
Ok(())
}
-#[allow(clippy::cyclomatic_complexity)]
+#[allow(clippy::cognitive_complexity)]
pub fn decode_mb_pred_cavlc(br: &mut BitReader, slice_hdr: &SliceHeader, mb_type: MBType, sstate: &mut SliceState, mb_info: &mut CurrentMBInfo) -> DecoderResult<()> {
mb_info.mb_type = mb_type;
let num_l0 = slice_hdr.num_ref_idx_l0_active;
}
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn handle_macroblock(&mut self, mb_info: &mut CurrentMBInfo) {
let pps = &self.pps[self.cur_pps];
self.ref_pics.truncate(0);
self.long_term.truncate(0);
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
pub fn select_refs(&mut self, sps: &SeqParameterSet, slice_hdr: &SliceHeader, cur_id: u32) {
self.ref_list0.truncate(0);
self.ref_list1.truncate(0);
}
}
-#[allow(clippy::cyclomatic_complexity)]
+#[allow(clippy::cognitive_complexity)]
pub fn parse_sps(src: &[u8]) -> DecoderResult<SeqParameterSet> {
let mut br = BitReader::new(src, BitReaderMode::BE);
let mut sps: SeqParameterSet = unsafe { std::mem::zeroed() };
Ok((first_mb_in_slice, slice_type))
}
-#[allow(clippy::cyclomatic_complexity)]
+#[allow(clippy::cognitive_complexity)]
pub fn parse_slice_header(br: &mut BitReader, sps_arr: &[SeqParameterSet], pps_arr: &[PicParameterSet], is_idr: bool, nal_ref_idc: u8) -> DecoderResult<SliceHeader> {
let mut hdr: SliceHeader = unsafe { std::mem::zeroed() };
/// Registers all available codecs provided by this crate.
pub fn itu_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in ITU_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
extern crate nihav_codec_support;
#[allow(clippy::collapsible_if)]
+#[allow(clippy::comparison_chain)]
#[allow(clippy::needless_range_loop)]
#[allow(clippy::useless_let_if_seq)]
mod codecs;
/// Registers all available codecs provided by this crate.
pub fn llaudio_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in LL_AUDIO_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
dstate: DecorrState::new(),
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_block(&mut self, hdr: &WVHeader, src: &[u8], start_ch: usize, abuf: &mut NABufferType) -> DecoderResult<()> {
let mut mr = MemoryReader::new_read(src);
let mut br = ByteReader::new(&mut mr);
extern crate nihav_core;
extern crate nihav_codec_support;
+#[allow(clippy::comparison_chain)]
#[allow(clippy::unreadable_literal)]
#[allow(clippy::verbose_bit_mask)]
mod codecs;
/// Registers all available codecs provided by this crate.
pub fn ms_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in MS_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
/// Registers all available encoders provided by this crate.
pub fn ms_register_all_encoders(re: &mut RegisteredEncoders) {
for encoder in MS_ENCODERS.iter() {
- re.add_encoder(encoder.clone());
+ re.add_encoder(*encoder);
}
}
let soniton = NASoniton::new(4, 0);
let out_ainfo = NAAudioInfo::new(ainfo.sample_rate, ainfo.channels, soniton, Self::calc_block_size(self.block_len, self.channels));
let info = NACodecInfo::new("ms-adpcm", NACodecTypeInfo::Audio(out_ainfo), None);
- let mut stream = NAStream::new(StreamType::Audio, stream_id, info.clone(), self.block_len as u32, ainfo.sample_rate, 0);
+ let mut stream = NAStream::new(StreamType::Audio, stream_id, info, self.block_len as u32, ainfo.sample_rate, 0);
stream.set_num(stream_id as usize);
let stream = stream.into_ref();
/// Registers all available codecs provided by this crate.
pub fn qt_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in QT_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
}
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn read_noise_band(&mut self, br: &mut QdmBitReader, ch: usize, band: usize, samples: &mut [f32; 10], signs: &[bool; 16], jstereo: bool) -> DecoderResult<()> {
let mut type34_first = true;
let mut type34_pred = 0.0;
Err(DecoderError::InvalidData)
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode(&mut self, _supp: &mut NADecoderSupport, pkt: &NAPacket) -> DecoderResult<NAFrameRef> {
let src = pkt.get_buffer();
validate!(src.len() >= 2);
copy_block(dframe, src.clone(), ebuf, 0, xoff, yoff, mx, my, bw * 4, bh * 4, 0, post, mode, ifuncs);
copy_block(dframe, src.clone(), ebuf, 1, xoff / 2, yoff / 2, cmx, cmy, bw * 2, bh * 2, 0, post, mode, ifuncs);
- copy_block(dframe, src.clone(), ebuf, 2, xoff / 2, yoff / 2, cmx, cmy, bw * 2, bh * 2, 0, post, mode, ifuncs);
+ copy_block(dframe, src, ebuf, 2, xoff / 2, yoff / 2, cmx, cmy, bw * 2, bh * 2, 0, post, mode, ifuncs);
}
impl SVQ3Decoder {
if let (Some(bwd_ref), true, true) = (self.ipbs.get_b_bwdref(), has_fwd, has_bwd) {
let mut aframe = NASimpleVideoFrame::from_video_buf(&mut self.avg_buf).unwrap();
let amv = MV { x: bmv.x + (self.mb_x as i16) * 16 * 6, y: bmv.y + (self.mb_y as i16) * 16 * 6 };
- mc_part(&mut aframe, bwd_ref.clone(), &mut self.ebuf, 0, 0, 4, 4, amv, bmode, ifuncs);
+ mc_part(&mut aframe, bwd_ref, &mut self.ebuf, 0, 0, 4, 4, amv, bmode, ifuncs);
let dstride = dframe.stride[0];
let dst = &mut dframe.data[dframe.offset[0] + self.mb_x * 16 + self.mb_y * 16 * dstride..];
extern crate nihav_core;
extern crate nihav_codec_support;
+#[allow(clippy::comparison_chain)]
#[allow(clippy::single_match)]
mod codecs;
pub use crate::codecs::qt_register_all_decoders;
Self::default()
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_frame_new(&mut self, br: &mut BitReader, buf: &mut NAVideoBuffer<u8>, is_intra: bool) -> DecoderResult<()> {
let (stride_y, stride_u, stride_v, stride_a) = (buf.get_stride(0), buf.get_stride(1), buf.get_stride(2), buf.get_stride(3));
let (mut off_y, mut off_u, mut off_v, mut off_a) = (buf.get_offset(0), buf.get_offset(1), buf.get_offset(2), buf.get_offset(3));
fn default() -> Self {
let mut cb: [Codebook<u8>; 16];
unsafe {
- cb = std::mem::uninitialized();
+ cb = std::mem::MaybeUninit::uninit().assume_init();
for i in 0..16 {
let mut cr = TableCodebookDescReader::new(&BINK_TREE_CODES[i], &BINK_TREE_BITS[i], map_u8);
std::ptr::write(&mut cb[i], Codebook::new(&mut cr, CodebookMode::LSB).unwrap());
/// Registers all available codecs provided by this crate.
pub fn rad_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in RAD_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
let stream = strres.unwrap();
let keyframe = (self.frame_pos[self.cur_frame] & 1) != 0;
let ts = NATimeInfo::new(Some(self.cur_frame as u64), None, None, self.tb_num, self.tb_den);
- let pkt = self.src.read_packet(stream.clone(), ts, keyframe, payload_size)?;
+ let pkt = self.src.read_packet(stream, ts, keyframe, payload_size)?;
self.cur_frame += 1;
pub use crate::codecs::rad_register_all_decoders;
#[cfg(feature="demuxers")]
+#[allow(clippy::comparison_chain)]
#[allow(clippy::cast_lossless)]
mod demuxers;
#[cfg(feature="demuxers")]
/// Registers all available codecs provided by this crate.
pub fn realmedia_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in RM_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
}
fn copy_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_AVG_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
}
fn avg_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
pub fn parse_frame(&mut self, supp: &mut NADecoderSupport, src: &[u8], bd: &mut dyn RV34BitstreamDecoder) -> DecoderResult<(NABufferType, FrameType, u64)> {
let mut slice_offs: Vec<usize> = Vec::new();
parse_slice_offsets(src, &mut slice_offs)?;
const RV30_EDGE2: [isize; 3] = [ 0, 2, 2 ];
impl RV34DSP for RV30DSP {
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn loop_filter(&self, frame: &mut NAVideoBuffer<u8>, _ftype: FrameType, mbinfo: &[RV34MBInfo], mb_w: usize, _mb_h: usize, row: usize) {
let mut offs: [usize; 3] = [0; 3];
let mut stride: [usize; 3] = [0; 3];
const C_RIGHT_COL_MASK: u32 = 0xA;
impl RV34DSP for RV40DSP {
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn loop_filter(&self, frame: &mut NAVideoBuffer<u8>, _ftype: FrameType, mbinfo: &[RV34MBInfo], mb_w: usize, mb_h: usize, row: usize) {
// todo proper B-frame filtering?
let mut offs: [usize; 3] = [0; 3];
}
Ok(())
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
fn decode_cb_tree(&mut self, buf: &mut NASimpleVideoFrame<u8>, hdr: &FrameHeader, br: &mut BitReader, xpos: usize, ypos: usize, log_size: u8) -> DecoderResult<()> {
if (xpos >= hdr.width) || (ypos >= hdr.height) { return Ok(()); }
let mut cbp8_cb: [[Codebook<u16>; 4]; NUM_INTER_SETS];
let mut cbp16_cb: [[Codebook<u16>; 12]; NUM_INTER_SETS];
unsafe {
- cbp8_cb = mem::uninitialized();
- cbp16_cb = mem::uninitialized();
+ cbp8_cb = mem::MaybeUninit::uninit().assume_init();
+ cbp16_cb = mem::MaybeUninit::uninit().assume_init();
for set_no in 0..NUM_INTER_SETS {
for i in 0..4 {
let mut cbr = RV60CodebookDescReader::new(&RV60_CBP8_TABS[set_no][i], NUM_CBP_ENTRIES, false);
let mut intra_coeff_cb: [CoeffCodebooks; NUM_INTRA_SETS];
let mut inter_coeff_cb: [CoeffCodebooks; NUM_INTER_SETS];
unsafe {
- intra_coeff_cb = mem::uninitialized();
+ intra_coeff_cb = mem::MaybeUninit::uninit().assume_init();
for set_no in 0..NUM_INTRA_SETS {
ptr::write(&mut intra_coeff_cb[set_no], CoeffCodebooks::init(set_no, true));
}
- inter_coeff_cb = mem::uninitialized();
+ inter_coeff_cb = mem::MaybeUninit::uninit().assume_init();
for set_no in 0..NUM_INTER_SETS {
ptr::write(&mut inter_coeff_cb[set_no], CoeffCodebooks::init(set_no, false));
}
});
}
-#[allow(clippy::cyclomatic_complexity)]
+#[allow(clippy::cognitive_complexity)]
fn luma_mc(dst: &mut [u8], mut didx: usize, dstride: usize, src: &[u8], mut sidx: usize, sstride: usize, w: usize, h: usize, cx: usize, cy: usize) {
if (cx == 0) && (cy == 0) {
for _ in 0..h {
sum += diff;
}
}
- #[allow(clippy::cyclomatic_complexity)]
+ #[allow(clippy::cognitive_complexity)]
pub fn pred_angle(&self, dst: &mut [u8], mut doff: usize, dstride: usize, size: usize, angle: usize, filter: bool) {
let mut filtered1: [u8; 96] = [0; 96];
let mut filtered2: [u8; 96] = [0; 96];
vec.push(c);
}
let str = String::from_utf8(vec);
- if str.is_ok() {
- Ok(str.unwrap())
+ if let Ok(res) = str {
+ Ok(res)
} else {
Ok(String::new())
}
}
}
-static RM_VIDEO_CODEC_REGISTER: &'static [(&[u8;4], &str)] = &[
+static RM_VIDEO_CODEC_REGISTER: &[(&[u8;4], &str)] = &[
(b"RV10", "realvideo1"),
(b"RV20", "realvideo2"),
(b"RVTR", "realvideo2"),
(b"CLV1", "clearvideo_rm"),
];
-static RM_AUDIO_CODEC_REGISTER: &'static [(&[u8;4], &str)] = &[
+static RM_AUDIO_CODEC_REGISTER: &[(&[u8;4], &str)] = &[
(b"lpcJ", "ra14.4"),
(b"28_8", "ra28.8"),
(b"cook", "cook"),
#[cfg(feature="decoders")]
#[allow(clippy::cast_lossless)]
#[allow(clippy::collapsible_if)]
+#[allow(clippy::comparison_chain)]
#[allow(clippy::excessive_precision)]
#[allow(clippy::identity_op)]
#[allow(clippy::needless_range_loop)]
None
}
-static CODEC_REGISTER: &'static [CodecDescription] = &[
+static CODEC_REGISTER: &[CodecDescription] = &[
desc!(audio-ll; "pcm", "PCM"),
desc!(audio; "alaw", "A-law PCM"),
desc!(audio; "ulaw", "mu-law PCM"),
desc!(video; "h264", "ITU H.264", CODEC_CAP_COMPLEX_REORDER | CODEC_CAP_HYBRID),
];
-static AVI_VIDEO_CODEC_REGISTER: &'static [(&[u8;4], &str)] = &[
+static AVI_VIDEO_CODEC_REGISTER: &[(&[u8;4], &str)] = &[
(&[1, 0, 0, 0], "msrle"),
(&[2, 0, 0, 0], "msrle"),
(b"VP70", "vp7"),
];
-static WAV_CODEC_REGISTER: &'static [(u16, &str)] = &[
+static WAV_CODEC_REGISTER: &[(u16, &str)] = &[
(0x0000, "unknown"),
(0x0001, "pcm"),
(0x0002, "ms-adpcm"),
(0x0501, "on2avc-501"),
];
-static MOV_VIDEO_CODEC_REGISTER: &'static [(&[u8;4], &str)] = &[
+static MOV_VIDEO_CODEC_REGISTER: &[(&[u8;4], &str)] = &[
(b"cvid", "cinepak"),
(b"jpeg", "jpeg"),
//(b"raw ", "raw"),
(b"avc1", "h264"),
];
-static MOV_AUDIO_CODEC_REGISTER: &'static [(&[u8;4], &str)] = &[
+static MOV_AUDIO_CODEC_REGISTER: &[(&[u8;4], &str)] = &[
(b"NONE", "pcm"),
(b"raw ", "pcm"),
(b"twos", "pcm"),
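The `&'static [...]` → `&[...]` edits address clippy's `redundant_static_lifetimes` lint: on `static` and `const` items the `'static` lifetime of a reference type is implied, so both spellings denote the same type. A minimal sketch:

    static NAMES: &[&str] = &["a", "b"];           // lifetime elided (preferred)
    static NAMES2: &'static [&str] = &["a", "b"];  // same type, redundant annotation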
/// Registers all available codecs provided by this crate.
pub fn vivo_register_all_decoders(rd: &mut RegisteredDecoders) {
for decoder in VIVO_CODECS.iter() {
- rd.add_decoder(decoder.clone());
+ rd.add_decoder(*decoder);
}
}
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_FUNCS);
}
fn copy_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
- for i in 0..4 {
+ for (i, mv) in mvs.iter().enumerate() {
let xadd = (i & 1) * 8;
let yadd = (i & 2) * 4;
- let mode = ((mvs[i].x & 1) + (mvs[i].y & 1) * 2) as usize;
- blockdsp::copy_block(&mut dst, src.clone(), 0, xpos + xadd, ypos + yadd, mvs[i].x >> 1, mvs[i].y >> 1, 8, 8, 0, 1, mode, H263_INTERP_FUNCS);
+ let mode = ((mv.x & 1) + (mv.y & 1) * 2) as usize;
+ blockdsp::copy_block(&mut dst, src.clone(), 0, xpos + xadd, ypos + yadd, mv.x >> 1, mv.y >> 1, 8, 8, 0, 1, mode, H263_INTERP_FUNCS);
}
let sum_mv = mvs[0] + mvs[1] + mvs[2] + mvs[3];
blockdsp::copy_block(&mut dst, src.clone(), 0, xpos, ypos, mv.x >> 1, mv.y >> 1, 16, 16, 0, 1, mode, H263_INTERP_AVG_FUNCS);
blockdsp::copy_block(&mut dst, src.clone(), 1, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
- blockdsp::copy_block(&mut dst, src.clone(), 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
+ blockdsp::copy_block(&mut dst, src, 2, xpos >> 1, ypos >> 1, mv.x >> 2, mv.y >> 2, 8, 8, 0, 1, cmode, H263_INTERP_AVG_FUNCS);
}
fn avg_blocks8x8(&self, dst: &mut NAVideoBuffer<u8>, src: NAVideoBufferRef<u8>, xpos: usize, ypos: usize, mvs: &[MV; 4]) {
let mut dst = NASimpleVideoFrame::from_video_buf(dst).unwrap();
- for i in 0..4 {
+ for (i, mv) in mvs.iter().enumerate() {
let xadd = (i & 1) * 8;
let yadd = (i & 2) * 4;
- let mode = ((mvs[i].x & 1) + (mvs[i].y & 1) * 2) as usize;
- blockdsp::copy_block(&mut dst, src.clone(), 0, xpos + xadd, ypos + yadd, mvs[i].x >> 1, mvs[i].y >> 1, 8, 8, 0, 1, mode, H263_INTERP_AVG_FUNCS);
+ let mode = ((mv.x & 1) + (mv.y & 1) * 2) as usize;
+ blockdsp::copy_block(&mut dst, src.clone(), 0, xpos + xadd, ypos + yadd, mv.x >> 1, mv.y >> 1, 8, 8, 0, 1, mode, H263_INTERP_AVG_FUNCS);
}
let sum_mv = mvs[0] + mvs[1] + mvs[2] + mvs[3];