1 //! Deflate format (RFC 1951) support.
3 //! This module provides functionality for decompressing raw deflated streams via [`Inflate`] and gzip files (RFC 1952) via [`gzip_decode`], and for compressing raw or zlib streams via [`Deflate`].
5 //! [`Deflate`]: ./struct.Deflate.html
6 //! [`Inflate`]: ./struct.Inflate.html
7 //! [`gzip_decode`]: ./fn.gzip_decode.html
11 //! Decompressing full input buffer into sufficiently large output buffer:
13 //! # use nihav_core::compr::DecompressError;
14 //! use nihav_core::compr::deflate::Inflate;
16 //! # fn decompress(input: &[u8]) -> Result<(), DecompressError> {
17 //! # let mut output_buffer = [0u8; 16];
18 //! let output_length = Inflate::uncompress(input, &mut output_buffer)?;
23 //! Decompressing input chunks into portions of output:
25 //! use nihav_core::compr::DecompressError;
26 //! use nihav_core::compr::deflate::Inflate;
28 //! # fn decompress(input_data: &[u8]) -> Result<(), DecompressError> {
29 //! let mut inflate = Inflate::new();
30 //! let mut dst_buf: Vec<u8> = Vec::new();
31 //! let mut output_chunk = [0u8; 1024];
32 //! for src in input_data.chunks(512) {
33 //! let mut repeat = false;
35 //! let ret = inflate.decompress_data(src, &mut output_chunk, repeat);
37 //! Ok(len) => { // we got a buffer decoded successfully to the end
38 //! dst_buf.extend_from_slice(&output_chunk[..len]);
41 //! Err(DecompressError::ShortData) => { // this block of data was fully read
44 //! Err(DecompressError::OutputFull) => {
45 //! // the output buffer is full, flush it and continue decoding the same block
47 //! dst_buf.extend_from_slice(&output_chunk);
59 //! Compressing input buffer into zlib stream:
61 //! use nihav_core::compr::deflate::{Deflate, DeflateMode, DeflateWriter};
63 //! # fn compress(input: &[u8]) {
64 //! let output = Vec::with_capacity(input.len() * 65540 / 65535 + 6);
65 //! let mut writer = DeflateWriter::new(output);
66 //! let mut compr = Deflate::new(DeflateMode::Fast);
67 //! compr.write_zlib_header(&mut writer);
68 //! compr.compress(input, &mut writer);
69 //! compr.compress_end(&mut writer);
70 //! let output = writer.end();
74 use crate::io::byteio::*;
75 use crate::io::bitreader::*;
76 use crate::io::codebook::*;
// Alphabet sizes used throughout this module.
// NOTE(review): RFC 1951 defines 288 literal/length codes (0..=287); here the
// constant is 287 and call sites add one where the full range is needed
// (see len() below returning NUM_LITERALS + 1) — confirm this is intentional.
79 const NUM_LITERALS: usize = 287;
80 const NUM_DISTS: usize = 32;
// Codebook descriptor for the fixed (BTYPE=01) literal/length Huffman code
// from RFC 1951 section 3.2.6: lengths 8/9/7/8 for the four symbol ranges.
82 struct FixedLenCodeReader {}
84 impl CodebookDescReader<u16> for FixedLenCodeReader {
// Code length per symbol index (idx < 144 branch elided in this view).
85 fn bits(&mut self, idx: usize) -> u8 {
87 else if idx < 256 { 9 }
88 else if idx < 280 { 7 }
91 #[allow(clippy::identity_op)]
// Canonical code value for the symbol, bit-reversed because deflate stores
// Huffman codes LSB-first.
92 fn code(&mut self, idx: usize) -> u32 {
93 let base = idx as u32;
94 let bits = self.bits(idx);
95 if idx < 144 { reverse_bits(base + 0x30, bits) }
96 else if idx < 256 { reverse_bits(base + 0x190 - 144, bits) }
97 else if idx < 280 { reverse_bits(base + 0x000 - 256, bits) }
98 else { reverse_bits(base + 0xC0 - 280, bits) }
// Symbols are just the indices; table covers the full 0..=NUM_LITERALS range.
100 fn sym (&mut self, idx: usize) -> u16 { idx as u16 }
101 fn len(&mut self) -> usize { NUM_LITERALS + 1 }
// Snapshot of the bit-reader position (bit buffer, bit count, byte pos).
// Copied in and out of `Inflate` so decoding can resume across input chunks.
104 #[derive(Clone,Copy,Default)]
105 struct BitReaderState {
// LSB-first bit reader over the current input chunk; wraps a saved
// BitReaderState so partial reads survive chunk boundaries.
111 struct CurrentSource<'a> {
116 impl<'a> CurrentSource<'a> {
// Start reading a fresh chunk (resets the byte position; elided here).
117 fn new(src: &'a [u8], br: BitReaderState) -> Self {
118 let mut newsrc = Self { src, br };
123 fn reinit(src: &'a [u8], br: BitReaderState) -> Self {
124 let mut newsrc = Self { src, br };
// Top up the 32-bit cache one byte at a time, LSB-first.
128 fn refill(&mut self) {
129 while (self.br.bits <= 24) && (self.br.pos < self.src.len()) {
130 self.br.bitbuf |= u32::from(self.src[self.br.pos]) << self.br.bits;
// Drop nbits from the cache; caller guarantees nbits <= br.bits.
135 fn skip_cache(&mut self, nbits: u8) {
136 self.br.bitbuf >>= nbits;
137 self.br.bits -= nbits;
// Read up to 16 bits; BitstreamEnd if the chunk cannot satisfy the request.
139 fn read(&mut self, nbits: u8) -> BitReaderResult<u32> {
140 if nbits == 0 { return Ok(0); }
141 if nbits > 16 { return Err(BitReaderError::TooManyBitsRequested); }
142 if self.br.bits < nbits {
144 if self.br.bits < nbits { return Err(BitReaderError::BitstreamEnd); }
146 let ret = self.br.bitbuf & ((1 << nbits) - 1);
147 self.skip_cache(nbits);
// Read a single bit as a flag (used for BFINAL).
150 fn read_bool(&mut self) -> BitReaderResult<bool> {
151 if self.br.bits == 0 {
153 if self.br.bits == 0 { return Err(BitReaderError::BitstreamEnd); }
155 let ret = (self.br.bitbuf & 1) != 0;
// Non-consuming peek; silently returns 0 for invalid widths — callers
// (the codebook LUT walk) tolerate short peeks near the end of input.
159 fn peek(&mut self, nbits: u8) -> u32 {
160 if nbits == 0 || nbits > 16 { return 0; }
161 if self.br.bits < nbits {
164 self.br.bitbuf & ((1 << nbits) - 1)
166 fn skip(&mut self, nbits: u32) -> BitReaderResult<()> {
167 if u32::from(self.br.bits) >= nbits {
168 self.skip_cache(nbits as u8);
// Discard bits up to the next byte boundary (stored-block alignment).
174 fn align(&mut self) {
175 let b = self.br.bits & 7;
// Bits remaining in this chunk (cached bits + unread bytes).
180 fn left(&self) -> isize {
181 ((self.src.len() as isize) - (self.br.pos as isize)) * 8 + (self.br.bits as isize)
// Codebook decoding over the chunked reader: walks the multi-level LUT,
// restoring `orig_br` and reporting MemoryError when the chunk runs dry so
// the caller can resume with more input (elided restore path).
185 impl<'a, S: Copy> CodebookReader<S> for CurrentSource<'a> {
186 fn read_cb(&mut self, cb: &Codebook<S>) -> CodebookResult<S> {
189 let mut lut_bits = cb.lut_bits;
190 let orig_br = self.br;
192 let lut_idx = (self.peek(lut_bits) as usize) + (idx as usize);
193 if cb.table[lut_idx] == TABLE_FILL_VALUE { return Err(CodebookError::InvalidCode); }
194 let bits = cb.table[lut_idx] & 0x7F;
195 esc = (cb.table[lut_idx] & 0x80) != 0;
196 idx = (cb.table[lut_idx] >> 8) as usize;
// Escape entries consume the whole LUT index width; leaf entries consume
// only the code's true length.
197 let skip_bits = if esc { u32::from(lut_bits) } else { bits };
198 if (skip_bits as isize) > self.left() {
// MemoryError doubles as "need more input" — translated to ShortData upstream.
201 return Err(CodebookError::MemoryError);
203 self.skip(skip_bits as u32).unwrap();
204 lut_bits = bits as u8;
// Resumable decoder states: each variant carries exactly the values needed to
// continue after the input chunk ran out mid-element (enum header elided).
215 StaticBlockInvLen(u32),
216 StaticBlockCopy(usize),
218 FixedBlockLengthExt(usize, u8),
219 FixedBlockDist(usize),
220 FixedBlockDistExt(usize, usize, u8),
221 FixedBlockCopy(usize, usize),
222 FixedBlockLiteral(u8),
228 DynCodeLengthsAdd(usize),
230 DynBlockLengthExt(usize, u8),
232 DynBlockDistExt(usize, usize, u8),
233 DynCopy(usize, usize),
238 ///! The decompressor for deflated streams (RFC 1951).
// Fixed literal/length codebook, built once in new(); dynamic codebooks are
// rebuilt per dynamic block.
241 fix_len_cb: Codebook<u16>,
252 dyn_len_cb: Option<Codebook<u32>>,
253 dyn_lit_cb: Option<Codebook<u32>>,
254 dyn_dist_cb: Option<Codebook<u32>>,
// Scratch storage for the code-length code (19 entries, RFC 1951 order)
// and for the combined literal+distance code lengths.
255 len_lengths: [u8; 19],
256 all_lengths: [u8; NUM_LITERALS + NUM_DISTS],
// Length-symbol extra-bit counts and base values (RFC 1951, section 3.2.5).
260 const LENGTH_ADD_BITS: [u8; 29] = [
261 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
262 1, 1, 2, 2, 2, 2, 3, 3, 3, 3,
263 4, 4, 4, 4, 5, 5, 5, 5, 0
265 const LENGTH_BASE: [u16; 29] = [
266 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
267 15, 17, 19, 23, 27, 31, 35, 43, 51, 59,
268 67, 83, 99, 115, 131, 163, 195, 227, 258
// Distance-symbol extra-bit counts and base values (RFC 1951, section 3.2.5).
270 const DIST_ADD_BITS: [u8; 30] = [
271 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
272 4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
273 9, 9, 10, 10, 11, 11, 12, 12, 13, 13
275 const DIST_BASE: [u16; 30] = [
276 1, 2, 3, 4, 5, 7, 9, 13, 17, 25,
277 33, 49, 65, 97, 129, 193, 257, 385, 513, 769,
278 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
// Transmission order of the code-length code lengths (RFC 1951: 16, 17, 18, 0, 8, ...).
280 const LEN_RECODE: [usize; 19] = [
281 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
// Extra bits / base run lengths for the repeat codes 16, 17, 18.
283 const REPEAT_BITS: [u8; 3] = [ 2, 3, 7 ];
284 const REPEAT_BASE: [u8; 3] = [ 3, 3, 11 ];
// Reads a fixed number of bits, converting "chunk exhausted" into ShortData
// (the current state has already been saved so decoding can resume).
286 macro_rules! read_bits {
287 ($self: expr, $csrc: expr, $bits: expr) => ({
288 if $csrc.left() < $bits as isize {
290 return Err(DecompressError::ShortData);
292 $csrc.read($bits).unwrap()
// Reads a codebook symbol: MemoryError => ShortData (resume later),
// any other codebook failure => InvalidData and the decoder is finalized.
296 macro_rules! read_cb {
297 ($self: expr, $csrc: expr, $cb: expr) => ({
298 let ret = $csrc.read_cb($cb);
299 if let Err(CodebookError::MemoryError) = ret {
301 return Err(DecompressError::ShortData);
306 $self.state = InflateState::End;
307 return Err(DecompressError::InvalidData);
314 ///! Creates a new instance of `Inflate` struct.
315 pub fn new() -> Self {
// The fixed codebook is static per RFC 1951 so construction cannot fail.
316 let mut cr = FixedLenCodeReader {};
317 let fix_len_cb = Codebook::new(&mut cr, CodebookMode::LSB).unwrap();
319 br: BitReaderState::default(),
327 state: InflateState::Start,
334 len_lengths: [0; 19],
335 all_lengths: [0; NUM_LITERALS + NUM_DISTS],
// Appends one byte to the circular history window (buffer length is a
// power of two, hence the mask via len() - 1).
339 fn put_literal(&mut self, val: u8) {
340 self.buf[self.bpos] = val;
341 self.bpos = (self.bpos + 1) & (self.buf.len() - 1);
// Copies `len` bytes from `offset` bytes back in the history window into
// both the window and `dst`. Byte-by-byte copy is required: with
// offset < len the source overlaps freshly written output.
344 fn lz_copy(&mut self, offset: usize, len: usize, dst: &mut [u8]) -> DecompressResult<()> {
345 let mask = self.buf.len() - 1;
// A distance reaching before the start of the stream is invalid data.
346 if offset > self.full_pos {
347 return Err(DecompressError::InvalidData);
349 let cstart = (self.bpos.wrapping_sub(offset)) & mask;
351 self.buf[(self.bpos + i) & mask] = self.buf[(cstart + i) & mask];
352 dst[i] = self.buf[(cstart + i) & mask];
354 self.bpos = (self.bpos + len) & mask;
355 self.full_pos += len;
358 ///! Reports whether decoder has finished decoding the input.
359 pub fn is_finished(&self) -> bool {
361 InflateState::End => true,
365 ///! Reports the current amount of bytes output into the destination buffer after the last run.
366 pub fn get_current_output_size(&self) -> usize { self.output_idx }
367 ///! Reports the total amount of bytes decoded so far.
// NOTE(review): this returns `bpos`, the wrapped position inside the circular
// window, not the monotonically growing `full_pos` — for outputs larger than
// the window this is not the total. Confirm whether `full_pos` was intended.
368 pub fn get_total_output_size(&self) -> usize { self.bpos }
369 ///! Tries to decompress input data and write it to the output buffer.
371 ///! Since the decompressor can work with arbitrary input and output chunks its return value may have several meanings:
372 ///! * `Ok(len)` means the stream has been fully decoded and then number of bytes output into the destination buffer is returned.
373 ///! * [`DecompressError::ShortData`] means the input stream has been fully read but more data is needed.
374 ///! * [`DecompressError::OutputFull`] means the output buffer is full and should be flushed. Then decoding should continue on the same input block with `continue_block` parameter set to `true`.
376 ///! [`DecompressError::ShortData`]: ../enum.DecompressError.html#variant.ShortData
377 ///! [`DecompressError::OutputFull`]: ../enum.DecompressError.html#variant.OutputFull
378 #[allow(clippy::comparison_chain)]
// Resumable state machine: every read goes through read_bits!/read_cb! which
// save enough state (via the InflateState variants) to continue on the next call.
379 pub fn decompress_data(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool) -> DecompressResult<usize> {
380 if src.is_empty() || dst.is_empty() {
381 return Err(DecompressError::InvalidArgument);
// A fresh chunk resets the byte position; continuing an OutputFull-interrupted
// block keeps it.
383 let mut csrc = if !continue_block {
384 CurrentSource::new(src, self.br)
387 CurrentSource::reinit(src, self.br)
389 // check for zlib stream header
// RFC 1950: CM=8 (deflate), CINFO<=7 (window <= 32K), header divisible by 31.
390 if let (&InflateState::Start, true) = (&self.state, src.len() > 2) {
391 let cm = src[0] & 0xF;
392 let cinfo = src[0] >> 4;
393 let hdr = (u16::from(src[0]) << 8) | u16::from(src[1]);
394 if cm == 8 && cinfo <= 7 && (hdr % 31) == 0 {
395 csrc.skip(16).unwrap();
// --- block header: BFINAL bit then 2-bit BTYPE ---
400 InflateState::Start | InflateState::BlockStart => {
401 if csrc.left() == 0 {
403 return Err(DecompressError::ShortData);
405 self.final_block = csrc.read_bool().unwrap();
406 self.state = InflateState::BlockMode;
408 InflateState::BlockMode => {
409 let bmode = read_bits!(self, csrc, 2);
// BTYPE: 0 = stored, 1 = fixed Huffman, 2 = dynamic Huffman, 3 = invalid.
413 self.state = InflateState::StaticBlockLen;
415 1 => { self.state = InflateState::FixedBlock; },
416 2 => { self.state = InflateState::DynBlockHlit; },
418 self.state = InflateState::End;
419 return Err(DecompressError::InvalidHeader);
// --- stored (uncompressed) block: LEN, NLEN (= !LEN), then raw bytes ---
423 InflateState::StaticBlockLen => {
424 let len = read_bits!(self, csrc, 16);
425 self.state = InflateState::StaticBlockInvLen(len);
427 InflateState::StaticBlockInvLen(len) => {
428 let inv_len = read_bits!(self, csrc, 16);
429 if (len ^ inv_len) != 0xFFFF {
430 self.state = InflateState::End;
431 return Err(DecompressError::InvalidHeader);
433 self.state = InflateState::StaticBlockCopy(len as usize);
435 InflateState::StaticBlockCopy(len) => {
// Remember how many bytes of the stored block are still pending.
439 self.state = InflateState::StaticBlockCopy(len - i);
440 return Err(DecompressError::ShortData);
442 let val = csrc.read(8).unwrap() as u8;
443 self.put_literal(val);
445 self.state = InflateState::BlockStart;
// --- fixed-Huffman block: literal/length via fix_len_cb, 5-bit distances ---
447 InflateState::FixedBlock => {
448 let val = read_cb!(self, csrc, &self.fix_len_cb);
450 if self.output_idx >= dst.len() {
// Stash the decoded literal so it is not lost across the OutputFull return.
452 self.state = InflateState::FixedBlockLiteral(val as u8);
453 return Err(DecompressError::OutputFull);
455 self.put_literal(val as u8);
456 dst[self.output_idx] = val as u8;
457 self.output_idx += 1;
458 } else if val == 256 {
// 256 is end-of-block; end-of-stream only when BFINAL was set.
459 if self.final_block {
460 self.state = InflateState::End;
461 return Ok(self.output_idx);
463 self.state = InflateState::BlockStart;
466 let len_idx = (val - 257) as usize;
467 if len_idx >= LENGTH_BASE.len() {
468 self.state = InflateState::End;
469 return Err(DecompressError::InvalidData);
471 let len_bits = LENGTH_ADD_BITS[len_idx];
472 let add_base = LENGTH_BASE[len_idx] as usize;
474 self.state = InflateState::FixedBlockLengthExt(add_base, len_bits);
476 self.state = InflateState::FixedBlockDist(add_base);
480 InflateState::FixedBlockLiteral(sym) => {
481 if self.output_idx >= dst.len() {
483 return Err(DecompressError::OutputFull);
485 self.put_literal(sym);
486 dst[self.output_idx] = sym;
487 self.output_idx += 1;
488 self.state = InflateState::FixedBlock;
490 InflateState::FixedBlockLengthExt(base, bits) => {
491 let add = read_bits!(self, csrc, bits) as usize;
492 self.state = InflateState::FixedBlockDist(base + add);
494 InflateState::FixedBlockDist(length) => {
// Fixed distance codes are 5 bits, stored MSB-first, hence the bit reversal.
495 let dist_idx = reverse_bits(read_bits!(self, csrc, 5), 5) as usize;
496 if dist_idx >= DIST_BASE.len() {
497 self.state = InflateState::End;
498 return Err(DecompressError::InvalidData);
500 let dist_bits = DIST_ADD_BITS[dist_idx];
501 let dist_base = DIST_BASE[dist_idx] as usize;
503 self.state = InflateState::FixedBlockCopy(length, dist_base);
505 self.state = InflateState::FixedBlockDistExt(length, dist_base, dist_bits);
508 InflateState::FixedBlockDistExt(length, base, bits) => {
509 let add = read_bits!(self, csrc, bits) as usize;
510 self.state = InflateState::FixedBlockCopy(length, base + add);
512 InflateState::FixedBlockCopy(length, dist) => {
// Partial copy when the destination cannot hold the full match; the
// remainder is carried in the state for the continue_block call.
513 if self.output_idx + length > dst.len() {
514 let copy_size = dst.len() - self.output_idx;
515 let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
517 self.state = InflateState::End;
518 return Err(DecompressError::InvalidData);
520 self.output_idx += copy_size;
522 self.state = InflateState::FixedBlockCopy(length - copy_size, dist);
523 return Err(DecompressError::OutputFull);
525 let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
527 self.state = InflateState::End;
528 return Err(DecompressError::InvalidData);
530 self.output_idx += length;
531 self.state = InflateState::FixedBlock;
// --- dynamic-Huffman block: HLIT, HDIST, HCLEN then three codebooks ---
533 InflateState::DynBlockHlit => {
534 self.hlit = (read_bits!(self, csrc, 5) as usize) + 257;
// NOTE(review): RFC 1951 allows up to 288 literal/length codes; rejecting
// hlit >= 287 matches NUM_LITERALS here but looks off by one — confirm.
535 if self.hlit >= 287 {
536 self.state = InflateState::End;
537 return Err(DecompressError::InvalidHeader);
539 self.state = InflateState::DynBlockHdist;
541 InflateState::DynBlockHdist => {
542 self.hdist = (read_bits!(self, csrc, 5) as usize) + 1;
543 self.state = InflateState::DynBlockHclen;
545 InflateState::DynBlockHclen => {
546 let hclen = (read_bits!(self, csrc, 4) as usize) + 4;
547 self.cur_len_idx = 0;
548 self.len_lengths = [0; 19];
549 self.all_lengths = [0; NUM_LITERALS + NUM_DISTS];
550 self.state = InflateState::DynLengths(hclen);
// Read the hclen 3-bit code lengths of the code-length code, stored in
// the scrambled LEN_RECODE order.
552 InflateState::DynLengths(len) => {
556 self.state = InflateState::DynLengths(len - i);
557 return Err(DecompressError::ShortData);
559 self.len_lengths[LEN_RECODE[self.cur_len_idx]] = csrc.read(3).unwrap() as u8;
560 self.cur_len_idx += 1;
562 let mut len_codes = [ShortCodebookDesc { code: 0, bits: 0 }; 19];
563 lengths_to_codes(&self.len_lengths, &mut len_codes)?;
564 let mut cr = ShortCodebookDescReader::new(len_codes.to_vec());
565 let ret = Codebook::new(&mut cr, CodebookMode::LSB);
567 self.state = InflateState::End;
568 return Err(DecompressError::InvalidHeader);
570 self.dyn_len_cb = Some(ret.unwrap());
571 self.cur_len_idx = 0;
572 self.state = InflateState::DynCodeLengths;
// Decode hlit + hdist code lengths; values 16..18 are run-length codes
// handled by DynCodeLengthsAdd.
574 InflateState::DynCodeLengths => {
575 if let Some(ref len_cb) = self.dyn_len_cb {
576 while self.cur_len_idx < self.hlit + self.hdist {
577 let ret = csrc.read_cb(len_cb);
578 let val = match ret {
580 Err(CodebookError::MemoryError) => {
582 return Err(DecompressError::ShortData);
585 self.state = InflateState::End;
586 return Err(DecompressError::InvalidHeader);
590 self.all_lengths[self.cur_len_idx] = val as u8;
591 self.cur_len_idx += 1;
593 let idx = (val as usize) - 16;
595 self.state = InflateState::End;
596 return Err(DecompressError::InvalidHeader);
598 self.state = InflateState::DynCodeLengthsAdd(idx);
// All lengths read: build the literal/length and distance codebooks.
602 let (lit_lengths, dist_lengths) = self.all_lengths.split_at(self.hlit);
604 let mut lit_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_LITERALS];
605 lengths_to_codes(&lit_lengths, &mut lit_codes)?;
606 let mut cr = ShortCodebookDescReader::new(lit_codes.to_vec());
607 let ret = Codebook::new(&mut cr, CodebookMode::LSB);
608 if ret.is_err() { return Err(DecompressError::InvalidHeader); }
609 self.dyn_lit_cb = Some(ret.unwrap());
611 let mut dist_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_DISTS];
612 lengths_to_codes(&dist_lengths[..self.hdist], &mut dist_codes)?;
613 let mut cr = ShortCodebookDescReader::new(dist_codes.to_vec());
614 let ret = Codebook::new(&mut cr, CodebookMode::LSB);
615 if ret.is_err() { return Err(DecompressError::InvalidHeader); }
616 self.dyn_dist_cb = Some(ret.unwrap());
618 self.state = InflateState::DynBlock;
// Repeat codes: 16 repeats the previous length, 17/18 emit runs of zero.
623 InflateState::DynCodeLengthsAdd(mode) => {
624 let base = REPEAT_BASE[mode] as usize;
625 let bits = REPEAT_BITS[mode];
626 let len = base + read_bits!(self, csrc, bits) as usize;
627 if self.cur_len_idx + len > self.hlit + self.hdist {
628 self.state = InflateState::End;
629 return Err(DecompressError::InvalidHeader);
631 let rpt = if mode == 0 {
// Code 16 with no previous length is malformed input.
632 if self.cur_len_idx == 0 {
633 self.state = InflateState::End;
634 return Err(DecompressError::InvalidHeader);
636 self.all_lengths[self.cur_len_idx - 1]
641 self.all_lengths[self.cur_len_idx] = rpt;
642 self.cur_len_idx += 1;
644 self.state = InflateState::DynCodeLengths;
// --- dynamic block payload: same structure as FixedBlock but with the
// per-block codebooks ---
646 InflateState::DynBlock => {
647 if let Some(ref lit_cb) = self.dyn_lit_cb {
648 let val = read_cb!(self, csrc, lit_cb);
650 if self.output_idx >= dst.len() {
652 self.state = InflateState::DynBlockLiteral(val as u8);
653 return Err(DecompressError::OutputFull);
655 self.put_literal(val as u8);
656 dst[self.output_idx] = val as u8;
657 self.output_idx += 1;
658 } else if val == 256 {
659 if self.final_block {
660 self.state = InflateState::End;
661 return Ok(self.output_idx);
663 self.state = InflateState::BlockStart;
666 let len_idx = (val - 257) as usize;
667 if len_idx >= LENGTH_BASE.len() {
668 self.state = InflateState::End;
669 return Err(DecompressError::InvalidData);
671 let len_bits = LENGTH_ADD_BITS[len_idx];
672 let add_base = LENGTH_BASE[len_idx] as usize;
674 self.state = InflateState::DynBlockLengthExt(add_base, len_bits);
676 self.state = InflateState::DynBlockDist(add_base);
683 InflateState::DynBlockLiteral(sym) => {
684 if self.output_idx >= dst.len() {
686 return Err(DecompressError::OutputFull);
688 self.put_literal(sym);
689 dst[self.output_idx] = sym;
690 self.output_idx += 1;
691 self.state = InflateState::DynBlock;
693 InflateState::DynBlockLengthExt(base, bits) => {
694 let add = read_bits!(self, csrc, bits) as usize;
695 self.state = InflateState::DynBlockDist(base + add);
697 InflateState::DynBlockDist(length) => {
698 if let Some(ref dist_cb) = self.dyn_dist_cb {
699 let dist_idx = read_cb!(self, csrc, dist_cb) as usize;
700 if dist_idx >= DIST_BASE.len() {
701 self.state = InflateState::End;
702 return Err(DecompressError::InvalidData);
704 let dist_bits = DIST_ADD_BITS[dist_idx];
705 let dist_base = DIST_BASE[dist_idx] as usize;
707 self.state = InflateState::DynCopy(length, dist_base);
709 self.state = InflateState::DynBlockDistExt(length, dist_base, dist_bits);
715 InflateState::DynBlockDistExt(length, base, bits) => {
716 let add = read_bits!(self, csrc, bits) as usize;
717 self.state = InflateState::DynCopy(length, base + add);
719 InflateState::DynCopy(length, dist) => {
720 if self.output_idx + length > dst.len() {
721 let copy_size = dst.len() - self.output_idx;
722 let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
724 self.state = InflateState::End;
725 return Err(DecompressError::InvalidData);
727 self.output_idx += copy_size;
729 self.state = InflateState::DynCopy(length - copy_size, dist);
730 return Err(DecompressError::OutputFull);
732 let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
734 self.state = InflateState::End;
735 return Err(DecompressError::InvalidData);
737 self.output_idx += length;
738 self.state = InflateState::DynBlock;
740 InflateState::End => {
746 ///! Decompresses input data into output returning the uncompressed data length.
// One-shot convenience wrapper: assumes `dst` is large enough for the whole
// stream, otherwise the inner call returns OutputFull/ShortData.
747 pub fn uncompress(src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
748 let mut inflate = Self::new();
749 inflate.decompress_data(src, dst, false)
753 impl Default for Inflate {
754 fn default() -> Self {
// Converts canonical Huffman code lengths into LSB-first codes
// (RFC 1951 section 3.2.2); zero-length entries get a null descriptor.
759 fn lengths_to_codes(lens: &[u8], codes: &mut [ShortCodebookDesc]) -> DecompressResult<()> {
760 let mut bits = [0u32; 32];
761 let mut pfx = [0u32; 33];
// First pass: histogram of code lengths, rejecting lengths >= 32.
762 for len in lens.iter() {
763 let len = *len as usize;
764 if len >= bits.len() {
765 return Err(DecompressError::InvalidHeader);
// Compute the first canonical code for each length.
771 for i in 0..bits.len() {
772 code = (code + bits[i]) << 1;
// Second pass: assign consecutive codes per length, bit-reversed for
// LSB-first storage.
776 for (len, codes) in lens.iter().zip(codes.iter_mut()) {
777 let len = *len as usize;
779 let bits = len as u8;
780 *codes = ShortCodebookDesc { code: reverse_bits(pfx[len], bits), bits };
783 *codes = ShortCodebookDesc { code: 0, bits: 0 };
// CRC32 (reflected, polynomial 0xEDB88320) as used by gzip; the 256-entry
// table is generated at construction time.
796 #[allow(clippy::unreadable_literal)]
798 let mut tab = [0u32; 256];
800 let mut c = i as u32;
803 c = 0xEDB88320 ^ (c >> 1);
// Standard table-driven byte-at-a-time CRC update; `crc` is stored
// pre-inverted (note the !self.crc here and the matching finalization elided).
812 fn update_crc(&mut self, src: &[u8]) {
813 let mut c = !self.crc;
814 for el in src.iter() {
815 c = self.tab[((c ^ u32::from(*el)) & 0xFF) as usize] ^ (c >> 8);
821 ///! Decodes input data in gzip file format (RFC 1952) returning a vector containing decoded data.
// Parses the gzip member header, inflates the payload in fixed-size chunks,
// and validates the trailing CRC32 + ISIZE (unless skip_crc is honoured by
// the elided branches).
822 pub fn gzip_decode(br: &mut ByteReader, skip_crc: bool) -> DecompressResult<Vec<u8>> {
823 const FLAG_HCRC: u8 = 0x02;
824 const FLAG_EXTRA: u8 = 0x04;
825 const FLAG_NAME: u8 = 0x08;
826 const FLAG_COMMENT: u8 = 0x10;
// Fixed 10-byte header: magic 1F 8B, CM=8 (deflate), flags, mtime, XFL, OS.
828 let id1 = br.read_byte()?;
829 let id2 = br.read_byte()?;
830 let cm = br.read_byte()?;
831 let flg = br.read_byte()?;
832 let _mtime = br.read_u32le()?;
833 let _xfl = br.read_byte()?;
834 let _os = br.read_byte()?;
835 if id1 != 0x1F || id2 != 0x8B || cm != 8 {
836 return Err(DecompressError::InvalidHeader);
// Skip optional fields: extra data, zero-terminated name and comment, HCRC.
839 if (flg & FLAG_EXTRA) != 0 {
840 let xlen = br.read_u16le()? as usize;
843 if (flg & FLAG_NAME) != 0 {
845 let b = br.read_byte()?;
851 if (flg & FLAG_COMMENT) != 0 {
853 let b = br.read_byte()?;
859 let _hcrc = if (flg & FLAG_HCRC) != 0 {
// Reserved flag bits must be zero.
864 if (flg & 0xE0) != 0 {
865 return Err(DecompressError::Unsupported);
868 let mut output: Vec<u8> = Vec::new();
869 let mut tail = [0u8; 8];
870 let mut inblk = [0u8; 1024];
871 let mut oblk = [0u8; 4096];
872 let mut inflate = Inflate::new();
873 let mut checker = GzipCRC32::new();
// Chunked decode loop: refill inblk from the reader, drain into oblk.
876 let ret = br.read_buf_some(&mut inblk);
877 if let Err(ByteIOError::EOF) = ret {
880 let inlen = match ret {
882 Err(_) => return Err(DecompressError::IOError),
884 let mut repeat = false;
886 let ret = inflate.decompress_data(&inblk[..inlen], &mut oblk, repeat);
889 checker.update_crc(&oblk[..outlen]);
890 output.extend_from_slice(&oblk[..outlen]);
893 Err(DecompressError::ShortData) => {
// OutputFull: flush the whole output chunk and retry the same input block.
896 Err(DecompressError::OutputFull) => {
898 checker.update_crc(&oblk);
899 output.extend_from_slice(&oblk);
906 // Save last 8 bytes for CRC and size.
908 tail.copy_from_slice(&inblk[inlen - 8..][..8]);
// Small reads: slide the previously saved tail and append the new bytes.
910 let shift_len = 8 - inlen;
911 for i in 0..shift_len {
912 tail[i] = tail[i + inlen];
914 for i in shift_len..8 {
915 tail[i] = inblk[i - shift_len];
// NOTE(review): leftover debug print — a truncated stream only emits "???"
// on stdout instead of reporting an error; consider returning ShortData here.
920 if !inflate.is_finished() { println!("???"); }
// RFC 1952 trailer: CRC32 then ISIZE (uncompressed size mod 2^32), both LE.
921 let crc = read_u32le(&tail[0..4])?;
922 let size = read_u32le(&tail[4..8])?;
923 if size != (output.len() as u32) {
924 return Err(DecompressError::CRCError);
926 if crc != checker.crc {
927 return Err(DecompressError::CRCError);
// A deflate token: either a literal byte or a back-reference (length,
// distance) with the precomputed length and distance symbols.
934 #[derive(Clone,Copy,Default)]
// Symbol 256 marks end-of-block.
942 const TOKEN_EOB: Token = Token { sym: 256, distsym: 0, len: 0, dist: 0 };
945 fn from_literal(sym: u8) -> Self {
// Maps (dist, len) to the RFC 1951 length symbol (257..=285) and distance
// symbol; the ranges below mirror LENGTH_BASE/LENGTH_ADD_BITS.
953 fn from_match(dist: u16, len: u16) -> Self {
954 let sym = match len {
955 3..= 10 => 257 + len - 3,
956 11..= 18 => 265 + (len - 11) / 2,
957 19..= 34 => 269 + (len - 19) / 4,
958 35..= 66 => 273 + (len - 35) / 8,
959 67..=130 => 277 + (len - 67) / 16,
960 131..=257 => 281 + (len - 131) / 32,
// Distances 1..=4 map directly; larger ones derive the symbol from the
// bit length of (dist - 1) plus the next-highest bit.
963 let distsym = if dist <= 4 {
966 let bits = 16 - (dist - 1).leading_zeros();
967 (bits as u8) * 2 - 2 + if ((dist - 1) & (1 << (bits - 2))) != 0 { 1 } else { 0 }
970 sym, distsym, len, dist
// Run-length encodes a set of code lengths into code-length-code tokens
// (16 = repeat previous, 17/18 = zero runs) while accumulating symbol
// statistics for building the code-length codebook.
975 fn add_codes(lens: &[u8], stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
979 for &len in lens.iter() {
// Zero runs of 11..=138 use code 18 (7 extra bits).
985 let run = lcount.min(138);
987 toks.push((18, run - 11));
// Zero runs of 3..=10 use code 17 (3 extra bits).
992 toks.push((17, lcount - 3));
// Repeats of the previous length (3..=6) use code 16 (2 extra bits).
1001 let run = lcount.min(6);
1003 toks.push((16, run - 3));
// Runs too short to encode are emitted literally.
1006 for _ in 0..lcount {
1007 stats[last as usize] += 1;
1008 toks.push((last, 0));
1011 stats[len as usize] += 1;
1012 toks.push((len, 0));
// Tail flush: the same three cases for whatever run is pending at the end.
1020 let run = lcount.min(138);
1022 toks.push((18, run - 11));
1027 toks.push((17, lcount - 3));
1030 for _ in 0..lcount {
1036 let run = lcount.min(6);
1038 toks.push((16, run - 3));
1041 for _ in 0..lcount {
1042 stats[last as usize] += 1;
1043 toks.push((last, 0));
1049 ///! Deflate stream writer.
// LSB-first bit writer accumulating into a 32-bit buffer and flushing whole
// bytes into the output vector.
1050 pub struct DeflateWriter {
1056 impl DeflateWriter {
1057 ///! Creates a new instance of `DeflateWriter` for a provided output.
1058 pub fn new(dst: Vec<u8>) -> Self {
// Pads the bit buffer to the next byte boundary (stored blocks, stream end).
1065 fn align(&mut self) {
1066 if (self.bits & 7) != 0 {
1067 self.bits += 8 - (self.bits & 7);
// Emits all complete bytes currently held in the bit buffer.
1070 fn flush(&mut self) {
1071 while self.bits >= 8 {
1072 self.dst.push(self.bbuf as u8);
// Appends `len` bits of `val`, LSB-first.
1077 fn write(&mut self, val: u16, len: u8) {
1079 self.bbuf |= u32::from(val) << self.bits;
1082 ///! Finishes writing the stream and returns the output vector.
1083 pub fn end(mut self) -> Vec<u8> {
1086 self.dst.push(self.bbuf as u8);
// Writes the dynamic-block codebook definition: RLE-encodes both length
// tables via add_codes, builds the 7-bit-max code-length code, then emits
// HCLEN lengths in LEN_RECODE order followed by the token stream.
1091 fn write_codes(&mut self, codes: &CodeHuff, dists: &DistHuff) {
1092 let mut stats = [0u32; 19];
1093 let mut toks = Vec::with_capacity(NUM_LITERALS + NUM_DISTS);
1094 let mut cw = [0u16; 19];
1095 let mut cl = [0u8; 19];
1098 add_codes(&codes.lens[..codes.num_codes], &mut stats, &mut toks);
1099 add_codes(&dists.lens[..dists.num_codes], &mut stats, &mut toks);
1101 gen_tree(&mut cw, &mut cl, &mut nc, &mut stats, 7);
// Trim trailing zero lengths in transmission order (loop body elided).
1104 for &idx in LEN_RECODE.iter().rev() {
1114 self.write((nc - 4) as u16, 4);
1115 for &idx in LEN_RECODE.iter().take(nc) {
1116 self.write(u16::from(cl[idx]), 3);
1118 for &(sym, add) in toks.iter() {
1119 self.write(cw[sym as usize], cl[sym as usize]);
// Extra bits for the repeat codes 16/17/18.
1121 16 => self.write(u16::from(add), 2),
1122 17 => self.write(u16::from(add), 3),
1123 18 => self.write(u16::from(add), 7),
// Emits the block payload: literal/length code, then for matches the
// length extra bits, distance code, and distance extra bits.
1128 fn write_tokens(&mut self, src: &[Token], codes: &CodeHuff, dists: &DistHuff) {
1129 for &tok in src.iter() {
1130 self.write(codes.codes[tok.sym as usize], codes.lens[tok.sym as usize]);
1132 self.write_len_bits(tok.len);
1133 self.write(dists.codes[tok.distsym as usize], dists.lens[tok.distsym as usize]);
1134 self.write_dist_bits(tok.dist);
// Extra length bits: lengths 3..=10 and 258 have none.
1138 fn write_len_bits(&mut self, len: u16) {
1140 if llen >= 8 && llen < 255 {
1141 let bits = (16 - llen.leading_zeros() - 3) as u8;
1142 self.write(llen & ((1 << bits) - 1), bits);
// Extra distance bits derived from the bit length of (dist - 1).
1145 fn write_dist_bits(&mut self, dist: u16) {
1146 let ddist = dist - 1;
1148 let bits = (16 - ddist.leading_zeros() - 2) as u8;
1149 self.write(ddist & ((1 << bits) - 1), bits);
// Literal/length Huffman code for the encoder: per-symbol statistics and the
// resulting (code, length) tables (struct header elided in this view).
1156 stats: [u32; NUM_LITERALS],
1157 codes: [u16; NUM_LITERALS],
1158 lens: [u8; NUM_LITERALS],
1163 fn new(is_fixed: bool) -> Self {
1166 stats: [0; NUM_LITERALS],
1167 codes: [0; NUM_LITERALS],
1168 lens: [0; NUM_LITERALS],
1169 num_codes: NUM_LITERALS,
// Either fills in the fixed RFC 1951 code (bit-reversed for LSB-first
// emission) or builds an optimal code from the token statistics.
1172 fn make_codes(&mut self, src: &[Token]) {
1175 self.codes[i] = reverse_bits((i + 0x30) as u32, 8) as u16;
1178 for i in 144..=255 {
1179 self.codes[i] = reverse_bits((i + 0x100) as u32, 9) as u16;
1182 for i in 256..=279 {
1183 self.codes[i] = reverse_bits((i & 0x1F) as u32, 7) as u16;
1186 for i in 280..NUM_LITERALS {
1187 self.codes[i] = reverse_bits((i - 280 + 0xC0) as u32, 8) as u16;
1191 for &tok in src.iter() {
1192 self.stats[tok.sym as usize] += 1;
1194 gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
// HLIT cannot encode fewer than 257 literal/length codes.
1195 if self.num_codes < 257 {
1196 self.num_codes = 257;
// Distance Huffman code for the encoder — same layout as CodeHuff but over
// the 30 distance symbols (padded to NUM_DISTS).
1204 stats: [u32; NUM_DISTS],
1205 codes: [u16; NUM_DISTS],
1206 lens: [u8; NUM_DISTS],
1211 fn new(is_fixed: bool) -> Self {
1214 stats: [0; NUM_DISTS],
1215 codes: [0; NUM_DISTS],
1216 lens: [0; NUM_DISTS],
1217 num_codes: NUM_DISTS,
1220 fn make_codes(&mut self, src: &[Token]) {
// Fixed distance codes are plain 5-bit values, bit-reversed.
1222 for i in 0..NUM_DISTS {
1223 self.codes[i] = reverse_bits(i as u32, 5) as u16;
1227 for &tok in src.iter() {
1229 self.stats[tok.distsym as usize] += 1;
1232 gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
// HDIST cannot encode zero distance codes.
1233 if self.num_codes < 1 {
// Huffman tree node; leaves carry a symbol, internal nodes use the sentinel
// NODE_SYM and child indices. `w` doubles as weight during build and as the
// assigned code length afterwards.
1240 #[derive(Clone,Copy,Default)]
1248 const NODE_SYM: u16 = 65500;
// Flat node storage sized for the worst case (all literal symbols present).
1251 nodes: [Node; NUM_LITERALS * 2],
1258 nodes: [Node::default(); NUM_LITERALS * 2],
// Insert keeping the array sorted by weight (insertion sort step).
1262 fn insert(&mut self, val: Node) {
1263 let mut idx = self.nnodes;
1264 for (i, nn) in self.nodes[..self.nnodes].iter().enumerate() {
1270 if idx < self.nnodes {
1271 for i in (idx..self.nnodes).rev() {
1272 self.nodes[i + 1] = self.nodes[i];
1275 self.nodes[idx] = val;
// Drop leading entries (zero-weight symbols) before building.
1278 fn trim(&mut self) {
1279 let mut start = self.nnodes;
1280 for (i, n) in self.nodes[..self.nnodes].iter().enumerate() {
1287 for i in 0..self.nnodes - start {
1288 self.nodes[i] = self.nodes[i + start];
1290 self.nnodes -= start;
// Classic Huffman construction: repeatedly merge the two lightest nodes,
// then walk the tree assigning depths as code lengths.
1293 fn build(&mut self) {
// A single symbol still needs a one-bit code.
1294 if self.nnodes == 1 {
1295 self.nodes[0].w = 1;
1299 while start + 1 < self.nnodes {
1300 let nn1 = self.nodes[start];
1301 let nn2 = self.nodes[start + 1];
1306 idx1: (start + 1) as u16,
1308 self.nodes[start].w = 0;
1309 self.nodes[start + 1].w = 0;
1313 if self.nnodes > 1 {
1314 self.assign_len(self.nnodes - 1, 0);
// Recursive depth assignment; leaves store their depth in `w`.
1317 fn assign_len(&mut self, idx: usize, len: u16) {
1318 if self.nodes[idx].sym == NODE_SYM {
1319 self.assign_len(self.nodes[idx].idx0 as usize, len + 1);
1320 self.assign_len(self.nodes[idx].idx1 as usize, len + 1);
1322 self.nodes[idx].w = len;
// Builds a length-limited Huffman code from `stats`: scales weights down
// until the total fits max_bits, builds the tree, extracts lengths, converts
// to canonical codes, and trims trailing unused symbols into num_codes.
1327 fn gen_tree(codes: &mut [u16], lens: &mut [u8], num_codes: &mut usize, stats: &mut [u32], max_bits: u8) {
1329 for &w in stats.iter() {
// Crude length limiting: halve weights until the sum fits 2^max_bits.
1338 while tot_w > (1 << max_bits) {
1339 for w in stats.iter_mut() {
1343 for &w in stats.iter() {
1347 let mut tree = Tree::new();
1348 for (sym, &w) in stats.iter().enumerate() {
// 64000 = "no child" sentinel for leaf nodes.
1349 tree.insert(Node{ sym: sym as u16, w: w as u16, idx0: 64000, idx1: 64000 });
1354 for n in tree.nodes[..tree.nnodes].iter() {
1355 if n.sym != NODE_SYM {
1356 lens[n.sym as usize] = n.w as u8;
1359 lengths_to_codes16(lens, codes);
1360 let mut sz = codes.len();
1361 for &len in lens.iter().rev() {
// Encoder-side variant of lengths_to_codes: same canonical-code derivation
// but writing plain u16 codes (no descriptor struct, no error path).
1370 fn lengths_to_codes16(lens: &[u8], codes: &mut [u16]) {
1371 let mut bits = [0u32; 32];
1372 let mut pfx = [0u32; 33];
1373 for len in lens.iter() {
1374 let len = *len as usize;
1379 for i in 0..bits.len() {
1380 code = (code + bits[i]) << 1;
1384 for (len, codes) in lens.iter().zip(codes.iter_mut()) {
1385 let len = *len as usize;
1387 let bits = len as u8;
1388 *codes = reverse_bits(pfx[len], bits) as u16;
// LZ77 parsing strategy interface: turn raw bytes into a token stream.
1397 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>);
// Trivial strategy: no matching at all, every byte becomes a literal.
1401 impl LZParse for NoParser {
1402 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1403 dst.reserve(src.len());
1404 for &b in src.iter() {
1405 dst.push(Token::from_literal(b));
1407 dst.push(TOKEN_EOB);
// Length of the common prefix of the two slices (match length candidate).
1411 fn check_match(src1: &[u8], src2: &[u8]) -> u16 {
1413 for (&a, &b) in src1.iter().zip(src2.iter()) {
1422 const HASH_SIZE: usize = 4096;
1423 const MAX_MATCH_LEN: usize = 258;
// Window shrunk by MAX_MATCH_LEN so a match starting inside the window
// cannot run past valid data when the buffer slides.
1424 const WINDOW_SIZE: usize = 32768 - MAX_MATCH_LEN;
// Sentinel for "no entry / end of chain" in the hash tables.
1425 const NONEXT: usize = WINDOW_SIZE * 2;
// Hash-chain match finder over a 3-byte rolling hash: hstart/hend delimit
// each bucket's chain, hnext links positions within a chain.
1427 struct MatchFinder<'a> {
1430 hstart: [usize; HASH_SIZE],
1431 hend: [usize; HASH_SIZE],
1432 hnext: [usize; WINDOW_SIZE * 3],
1435 impl<'a> MatchFinder<'a> {
1436 fn new(src: &'a [u8]) -> Self {
1437 let mut obj = Self {
1440 hstart: [0; HASH_SIZE],
1441 hend: [0; HASH_SIZE],
1442 hnext: [0; WINDOW_SIZE * 3],
// 3-byte hash folded into HASH_SIZE buckets; requires src.len() >= 3.
1447 fn hash(src: &[u8]) -> usize {
1449 (((u16::from(src[0]) << 10) ^ (u16::from(src[1]) << 5) ^ u16::from(src[2])) & ((HASH_SIZE as u16) - 1)) as usize
// Rebuilds all chains for positions [pos, end); iterating in reverse keeps
// chains ordered nearest-first when walked from hstart.
1451 fn build_hash(&mut self) {
1452 for el in self.hstart.iter_mut() { *el = NONEXT; }
1453 for el in self.hend.iter_mut() { *el = NONEXT; }
1454 for el in self.hnext.iter_mut() { *el = NONEXT; }
1455 if self.pos + 3 >= self.src.len() {
1458 let end = (self.src.len() - 3).min(self.pos + NONEXT);
1459 for i in (self.pos .. end).rev() {
1460 let key = Self::hash(&self.src[i..]);
1461 if self.hstart[key] == NONEXT {
1462 self.hstart[key] = i;
1464 self.hnext[key] = NONEXT;
1466 self.hnext[self.hend[key]] = i;
// Best (distance, length) for the current position, walking the hash chain
// within the window; bails out early once MAX_MATCH_LEN is reached.
1471 fn find_match(&mut self) -> (u16, u16) {
1472 if self.pos == 0 || self.pos + 3 > self.src.len() {
1475 let key = Self::hash(&self.src[self.pos..]) as usize;
1477 let mut best_pos = 0;
1478 let mut best_len = 0;
1479 let mut idx = self.hstart[key];
1480 while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
1482 let cur_len = check_match(&self.src[self.pos..], &self.src[idx..]);
1483 if cur_len > best_len {
1485 best_pos = self.pos - idx;
1486 if best_len >= (MAX_MATCH_LEN as u16) {
1487 return (best_pos as u16, MAX_MATCH_LEN as u16);
1491 idx = self.hnext[idx];
1493 (best_pos as u16, best_len)
// Records, per match length, the nearest distance achieving it — used by
// the more thorough parsers to weigh alternatives.
1495 fn find_all_matches(&mut self, dists: &mut [u16; MAX_MATCH_LEN + 1]) {
1496 if self.pos == 0 || self.pos + 3 > self.src.len() {
1499 let key = Self::hash(&self.src[self.pos..]) as usize;
1500 let mut idx = self.hstart[key];
1501 while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
1503 let cur_len = (check_match(&self.src[self.pos..], &self.src[idx..]) as usize).min(MAX_MATCH_LEN);
1504 if cur_len > 0 && dists[cur_len] == 0 {
1505 dists[cur_len] = (self.pos - idx) as u16;
1508 idx = self.hnext[idx];
// Moves the cursor forward; once it passes NONEXT the source view is slid
// so only the last WINDOW_SIZE bytes are kept, and the chains are pruned
// of positions that fell out of the window.
1511 fn advance(&mut self, num: usize) {
1514 if self.pos >= NONEXT {
1515 let (_, tail) = self.src.split_at(self.pos - WINDOW_SIZE);
1517 self.pos = WINDOW_SIZE;
1520 for (start, end) in self.hstart.iter_mut().zip(self.hend.iter_mut()) {
1521 let mut idx = *start;
1522 while idx != NONEXT && idx + WINDOW_SIZE < self.pos {
1523 idx = self.hnext[idx];
1532 fn get_sym(&self) -> u8 { self.src[self.pos] }
1533 fn is_end(&self) -> bool { self.pos >= self.src.len() }
// Greedy LZ77 parser: always takes the longest match available at the
// current position, otherwise emits a literal.
1536 struct GreedyParser {}
1537 impl LZParse for GreedyParser {
1538 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1539 dst.reserve(src.len());
1541 let mut matcher = MatchFinder::new(src);
1542 while !matcher.is_end() {
1543 let (best_pos, best_len) = matcher.find_match();
// A usable match was found — encode it and skip past the matched bytes.
1546 dst.push(Token::from_match(best_pos, best_len));
1547 matcher.advance(best_len as usize);
// No match — emit the current byte as a literal.
1549 dst.push(Token::from_literal(matcher.get_sym()));
1553 dst.push(TOKEN_EOB);
// Lazy LZ77 parser: before committing to a match it peeks at the next
// position; if the match there is sufficiently longer it emits a literal
// first and takes the later, longer match instead (classic lazy evaluation).
1557 struct LazyParser {}
1558 impl LZParse for LazyParser {
1559 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1560 dst.reserve(src.len());
1562 let mut matcher = MatchFinder::new(src);
1563 while !matcher.is_end() {
1564 let (best_pos, best_len) = matcher.find_match();
// Remember the current byte in case we defer the match by one position.
1566 let last_sym = matcher.get_sym();
1568 if !matcher.is_end() {
1569 let (best_pos1, best_len1) = matcher.find_match();
// The deferred match must gain more than the literal it costs.
1570 if best_len1 > best_len + 1 {
1571 dst.push(Token::from_literal(last_sym));
1572 dst.push(Token::from_match(best_pos1, best_len1));
1573 matcher.advance(best_len1 as usize);
// Take the original match (one byte was already stepped over above).
1577 dst.push(Token::from_match(best_pos, best_len));
1578 matcher.advance((best_len - 1) as usize);
1580 dst.push(Token::from_literal(matcher.get_sym()));
1584 dst.push(TOKEN_EOB);
1588 #[derive(Clone,Copy)]
// Default trellis node for the optimal parser: maximum price marks the
// position as not yet reachable in the shortest-path search.
1595 impl Default for TNode {
1596 fn default() -> Self {
1598 price: std::u32::MAX,
// Optimal (trellis-based) LZ77 parser: picks the token sequence with the
// lowest estimated bit cost via a shortest-path pass over all positions.
1605 struct OptimalParser {
// One node per input position (plus one past the end).
1606 trellis: Vec<TNode>,
1608 impl OptimalParser {
1609 fn new() -> Self { Self::default() }
// Rough cost estimate: a literal is priced at a flat 9 bits.
1610 fn sym_price(_sym: u8) -> u32 { 9 }
// Rough cost estimate for a match, driven by the distance's bit width
// (longer distances need more extra bits); length is ignored here.
1611 fn match_price(dist: u16, _len: u16) -> u32 {
1615 let bits = 16 - (dist - 1).leading_zeros();
1620 impl Default for OptimalParser {
1621 fn default() -> Self {
1623 trellis: Vec::with_capacity(WINDOW_SIZE),
1627 impl LZParse for OptimalParser {
1628 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
// Empty input: just emit the end-of-block token.
1630 dst.push(TOKEN_EOB);
1633 dst.reserve(src.len());
// Build one trellis node per position plus a terminal node.
1635 self.trellis.truncate(0);
1636 self.trellis.reserve(src.len() + 1);
1637 for _ in 0..=src.len() {
1638 self.trellis.push(TNode::default());
// Position 0 is the start of the path and costs nothing.
1640 self.trellis[0].price = 0;
// Forward pass: relax a literal edge (i -> i+1) and one match edge per
// available match length (i -> i+len) from every reachable position.
1642 let mut matcher = MatchFinder::new(src);
1643 for i in 0..self.trellis.len() - 1 {
1644 let mut dists = [0; MAX_MATCH_LEN + 1];
1645 matcher.find_all_matches(&mut dists);
1647 let sym = matcher.get_sym();
1648 let lprice = Self::sym_price(sym) + self.trellis[i].price;
1649 if self.trellis[i + 1].price > lprice {
1650 self.trellis[i + 1].price = lprice;
1651 self.trellis[i + 1].link = i;
1653 for (len, &dist) in dists.iter().enumerate() {
1655 let mprice = Self::match_price(dist, len as u16) + self.trellis[i].price;
1656 if self.trellis[i + len].price > mprice {
1657 self.trellis[i + len].price = mprice;
1658 self.trellis[i + len].link = i;
// Remember which distance the winning match edge used.
1659 self.trellis[i].dist = dist;
// Backward pass: the links point to predecessors; reverse them in place so
// they point forward along the cheapest path.
1666 let mut idx = self.trellis.len() - 1;
1667 let mut nidx = self.trellis[idx].link;
1671 nidx = self.trellis[idx].link;
1672 self.trellis[idx].link = oidx;
// Emission pass: walk the forward links, turning unit steps into literals
// and longer steps into matches.
1676 while idx < self.trellis.len() - 1 {
1677 let len = self.trellis[idx].link - idx;
1679 dst.push(Token::from_literal(src[idx]));
1681 dst.push(Token::from_match(self.trellis[idx].dist, len as u16));
1683 idx = self.trellis[idx].link;
1686 dst.push(TOKEN_EOB);
1690 /// Deflate compression mode.
1691 #[derive(Clone,Copy,Debug,PartialEq)]
1692 pub enum DeflateMode {
1693 /// No compression.
1695 /// Fast compression.
1697 /// Still fast but better compression.
1699 /// Slow but the best compression.
// Internal block-type selector (stored/fixed/dynamic); variants elided in this view.
1703 #[derive(Clone,Copy,Debug,PartialEq)]
// Maximum payload of a single deflate stored block (16-bit LEN field).
1710 const MAX_BLOCK_SIZE: usize = 65535;
1712 /// Deflate stream compressor.
1713 pub struct Deflate {
// Staging buffer for not-yet-compressed input (one block's worth).
1716 srcbuf: [u8; MAX_BLOCK_SIZE],
// LZ77 parser selected by the compression mode.
1721 parser: Box<dyn LZParse>,
1725 /// Creates a new instance of `Deflate`.
1726 pub fn new(mode: DeflateMode) -> Self {
// Map the public mode onto an internal block type and an LZ parser.
1727 let (mode, parser) = match mode {
1728 DeflateMode::NoCompr => (Mode::Copy, Box::new(NoParser{}) as Box<dyn LZParse>),
1729 DeflateMode::Fast => (Mode::Fixed, Box::new(GreedyParser{}) as Box<dyn LZParse>),
1730 DeflateMode::Better => (Mode::Dynamic, Box::new(LazyParser{}) as Box<dyn LZParse>),
1731 DeflateMode::Best => (Mode::Dynamic, Box::new(OptimalParser::new()) as Box<dyn LZParse>),
1735 tokens: Vec::with_capacity(MAX_BLOCK_SIZE),
1736 srcbuf: [0; MAX_BLOCK_SIZE],
1743 /// Writes zlib stream header.
1744 pub fn write_zlib_header(&mut self, wr: &mut DeflateWriter) {
// Second header byte (FLG) advertises the compression level used;
// its low bits double as the FCHECK checksum of the header pair (RFC 1950).
1747 let level = match self.mode {
1749 Mode::Fixed => 0x5E,
1750 Mode::Dynamic => 0x9C,
1751 // 0xDA for the strongest one
// Remember to append the Adler-32 footer at the end of the stream.
1754 self.zlib_mode = true;
// Writes the zlib trailer: the Adler-32 checksum of all uncompressed data,
// big-endian — high half (sum2) first, then the low half (sum1).
1756 fn write_zlib_footer(&self, wr: &mut DeflateWriter) {
1758 wr.write((self.sum2 >> 8) as u16, 8);
1759 wr.write((self.sum2 & 0xFF) as u16, 8);
1760 wr.write((self.sum1 >> 8) as u16, 8);
1761 wr.write((self.sum1 & 0xFF) as u16, 8);
1763 /// Queues data for compression.
1765 /// The data might be not actually compressed until [`compress_end`] is called.
1767 /// [`compress_end`]: ./struct.Deflate.html#method.compress_end
1768 pub fn compress(&mut self, src: &[u8], wr: &mut DeflateWriter) {
// Accumulate input into the staging buffer, emitting a block each time
// the buffer reaches MAX_BLOCK_SIZE.
1770 while !src.is_empty() {
1771 let clen = src.len().min(MAX_BLOCK_SIZE - self.ssize);
1772 let (head, tail) = src.split_at(clen);
1774 self.srcbuf[self.ssize..][..clen].copy_from_slice(head);
1776 if self.ssize == MAX_BLOCK_SIZE {
// Buffer full: flush as a non-final block.
1777 self.do_block(wr, false);
1781 /// Tells the encoder to finish data compression.
1783 /// Complete data will be output after this call.
1784 pub fn compress_end(&mut self, wr: &mut DeflateWriter) {
// Flush any buffered input as the final block.
1786 self.do_block(wr, true);
// Otherwise emit an empty fixed-Huffman final block: its end-of-block
// symbol is the 7-bit all-zero code.
1790 wr.write(0, 7); //static EOF sym
// In zlib mode the stream ends with the Adler-32 footer.
1793 self.write_zlib_footer(wr);
// Encodes the currently buffered input as one deflate block of the configured
// type (stored / fixed Huffman / dynamic Huffman) and updates the checksum.
1796 fn do_block(&mut self, wr: &mut DeflateWriter, final_block: bool) {
// NOTE(review): despite the name this is the Adler-32 modulus (65521),
// not a CRC polynomial — the zlib footer uses Adler-32.
1797 const CRC_BASE: u32 = 65521;
// Fold the block's bytes into the running Adler-32 state (sum1/sum2),
// reducing after every step so the sums stay below the modulus.
1798 for &b in self.srcbuf[..self.ssize].iter() {
1799 self.sum1 += u32::from(b);
1800 if self.sum1 >= CRC_BASE {
1801 self.sum1 -= CRC_BASE;
1803 self.sum2 += self.sum1;
1804 if self.sum2 >= CRC_BASE {
1805 self.sum2 -= CRC_BASE;
// Stored block: BFINAL bit, then LEN and its one's complement NLEN,
// then the raw bytes (RFC 1951 section 3.2.4).
1810 wr.write(final_block as u16, 1);
1813 wr.write(self.ssize as u16, 16);
1814 wr.write(!self.ssize as u16, 16);
1815 for &b in self.srcbuf[..self.ssize].iter() {
1816 wr.write(u16::from(b), 8);
// Fixed-Huffman block: tokenize the buffer and emit with the fixed tables.
1820 wr.write(final_block as u16, 1);
1822 self.tokens.truncate(0);
1823 self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
1824 let mut codes = CodeHuff::new(true);
1825 codes.make_codes(&self.tokens);
1826 let mut dists = DistHuff::new(true);
1827 dists.make_codes(&self.tokens);
1828 wr.write_tokens(&self.tokens, &codes, &dists);
// Dynamic-Huffman block: build per-block code tables, write the HLIT/HDIST
// counts and the code descriptions, then the tokens.
1831 wr.write(final_block as u16, 1);
1833 self.tokens.truncate(0);
1834 self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
1835 let mut codes = CodeHuff::new(false);
1836 codes.make_codes(&self.tokens);
1837 let mut dists = DistHuff::new(false);
1838 dists.make_codes(&self.tokens);
1839 wr.write((codes.num_codes - 257) as u16, 5);
1840 wr.write((dists.num_codes - 1) as u16, 5);
1841 wr.write_codes(&codes, &dists);
1842 wr.write_tokens(&self.tokens, &codes, &dists);
// Decompresses a small fixed-Huffman deflate stream in one shot and checks
// the output equals "Hello, world!".
1854 fn test_inflate1() {
1855 const TEST_DATA: &[u8] = &[
1856 0xF3, 0x48, 0xCD, 0xC9, 0xC9, 0xD7, 0x51, 0x28,
1857 0xCF, 0x2F, 0xCA, 0x49, 0x51, 0x04, 0x00 ];
1858 const TEST_REF: &[u8] = b"Hello, world!";
1859 let mut dst_buf = [0u8; 13];
1860 let len = Inflate::uncompress(TEST_DATA, &mut dst_buf).unwrap();
1861 assert_eq!(len, 13);
1863 assert_eq!(dst_buf[i], TEST_REF[i]);
// Feeds a deflate stream in 3-byte chunks into a 7-byte output buffer,
// exercising both the ShortData (need more input) and OutputFull
// (flush and continue) paths of the streaming decompressor.
1867 fn test_inflate2() {
1868 const TEST_DATA3: &[u8] = &[ 0x4B, 0x4C, 0x44, 0x80, 0x24, 0x54, 0x80, 0x2C, 0x06, 0x00 ];
1869 const TEST_REF3: &[u8] = b"aaaaaaaaaaaabbbbbbbbbbbbbbbaaaaabbbbbbb";
1870 let mut dst_buf = [0u8; 39];
1872 let mut inflate = Inflate::new();
1873 let mut output_chunk = [0u8; 7];
1874 let mut output_pos = 0;
1875 for input in TEST_DATA3.chunks(3) {
1876 let mut repeat = false;
1878 let ret = inflate.decompress_data(input, &mut output_chunk, repeat);
// Block decoded to the end — copy out the decoded bytes.
1882 dst_buf[output_pos + i] = output_chunk[i];
// Current input chunk exhausted — move on to the next chunk.
1887 Err(DecompressError::ShortData) => {
// Output buffer filled — flush it and retry the same input chunk.
1890 Err(DecompressError::OutputFull) => {
1892 for i in 0..output_chunk.len() {
1893 dst_buf[output_pos + i] = output_chunk[i];
1895 output_pos += output_chunk.len();
1898 panic!("decompress error {:?}", ret.err().unwrap());
// All input consumed: verify the reassembled output.
1904 assert_eq!(output_pos, dst_buf.len());
1905 for i in 0..output_pos {
1906 assert_eq!(dst_buf[i], TEST_REF3[i]);
// Decodes an embedded gzip file (magic 1F 8B; FNAME flag set, original name
// "1.txt") via gzip_decode and checks it succeeds.
1910 fn test_inflate3() {
1911 const TEST_DATA: &[u8] = &[
1912 0x1F, 0x8B, 0x08, 0x08, 0xF6, 0x7B, 0x90, 0x5E, 0x02, 0x03, 0x31, 0x2E, 0x74, 0x78, 0x74, 0x00,
1913 0xE5, 0x95, 0x4B, 0x4E, 0xC3, 0x30, 0x10, 0x40, 0xF7, 0x39, 0xC5, 0x1C, 0x00, 0x16, 0x70, 0x83,
1914 0x0A, 0xB5, 0x3B, 0xE8, 0x82, 0x5E, 0x60, 0x1A, 0x4F, 0xE2, 0x11, 0xFE, 0x44, 0x1E, 0xA7, 0x69,
1915 0x6E, 0xCF, 0x38, 0xDD, 0xB0, 0x40, 0xA2, 0x46, 0x2D, 0x20, 0x2A, 0xE5, 0xAB, 0xCC, 0xE7, 0xBD,
1916 0x49, 0xAC, 0x6C, 0x03, 0x64, 0x4B, 0xD0, 0x71, 0x92, 0x0C, 0x06, 0x67, 0x88, 0x1D, 0x3C, 0xD9,
1917 0xC4, 0x92, 0x3D, 0x4A, 0xF3, 0x3C, 0x43, 0x4E, 0x23, 0x81, 0x8B, 0x07, 0x82, 0x1E, 0xF5, 0x90,
1918 0x23, 0x78, 0x6A, 0x56, 0x30, 0x60, 0xCA, 0x89, 0x4D, 0x4F, 0xC0, 0x01, 0x10, 0x06, 0xC2, 0xA4,
1919 0xA1, 0x44, 0xCD, 0xF6, 0x54, 0x50, 0xA8, 0x8D, 0xC1, 0x9C, 0x5F, 0x71, 0x37, 0x45, 0xC8, 0x63,
1920 0xCA, 0x8E, 0xC0, 0xE8, 0x23, 0x69, 0x56, 0x9A, 0x8D, 0x5F, 0xB6, 0xC9, 0x96, 0x53, 0x4D, 0x17,
1921 0xAB, 0xB9, 0xB0, 0x49, 0x14, 0x5A, 0x0B, 0x96, 0x82, 0x7C, 0xB7, 0x6F, 0x17, 0x35, 0xC7, 0x9E,
1922 0xDF, 0x78, 0xA3, 0xF1, 0xD0, 0xA2, 0x73, 0x1C, 0x7A, 0xD8, 0x2B, 0xB3, 0x5C, 0x90, 0x85, 0xBB,
1923 0x2A, 0x14, 0x2E, 0xF7, 0xD1, 0x19, 0x48, 0x0A, 0x23, 0x57, 0x45, 0x13, 0x3E, 0xD6, 0xA0, 0xBD,
1924 0xF2, 0x11, 0x7A, 0x22, 0x21, 0xAD, 0xE5, 0x70, 0x56, 0xA0, 0x9F, 0xA5, 0xA5, 0x03, 0x85, 0x2A,
1925 0xDE, 0x92, 0x00, 0x32, 0x61, 0x10, 0xAD, 0x27, 0x13, 0x7B, 0x5F, 0x98, 0x7F, 0x59, 0x83, 0xB8,
1926 0xB7, 0x35, 0x16, 0xEB, 0x12, 0x0F, 0x1E, 0xD9, 0x14, 0x0B, 0xCF, 0xEE, 0x6D, 0x91, 0xF8, 0x93,
1927 0x6E, 0x81, 0x3F, 0x7F, 0x41, 0xA4, 0x22, 0x1F, 0xB7, 0xE6, 0x85, 0x83, 0x9A, 0xA2, 0x61, 0x12,
1928 0x0D, 0x0F, 0x6D, 0x01, 0xBD, 0xB0, 0xE8, 0x1D, 0xEC, 0xD1, 0xA0, 0xBF, 0x1F, 0x4E, 0xFB, 0x55,
1929 0xBD, 0x73, 0xDD, 0x87, 0xB9, 0x53, 0x23, 0x17, 0xD3, 0xE2, 0xE9, 0x08, 0x87, 0x42, 0xFF, 0xCF,
1930 0x26, 0x42, 0xAE, 0x76, 0xB5, 0xAE, 0x97, 0x0C, 0x18, 0x78, 0xA0, 0x24, 0xE5, 0x54, 0x0C, 0x6E,
1931 0x60, 0x52, 0x79, 0x22, 0x57, 0xF5, 0x87, 0x78, 0x78, 0x04, 0x93, 0x46, 0xEF, 0xCB, 0x98, 0x96,
1932 0x8B, 0x65, 0x00, 0xB7, 0x36, 0xBD, 0x77, 0xA8, 0xBD, 0x5A, 0xAA, 0x1A, 0x09, 0x00, 0x00
1935 let mut mr = MemoryReader::new_read(TEST_DATA);
1936 let mut br = ByteReader::new(&mut mr);
// `false` skips CRC verification; success of the decode is the assertion here.
1937 let _dst_buf = gzip_decode(&mut br, false).unwrap();
1939 // println!("{}", String::from_utf8_lossy(_dst_buf.as_slice()));
// Compresses "Hello, world!" in NoCompr (stored) mode and checks the exact
// output bytes: zlib header 78 01, stored-block LEN/NLEN, raw payload, and
// the Adler-32 footer (20 5E 04 8A).
1942 fn test_deflate_crc() {
1943 let output = Vec::with_capacity(20);
1944 let mut writer = DeflateWriter::new(output);
1945 let mut compr = Deflate::new(DeflateMode::NoCompr);
1946 compr.write_zlib_header(&mut writer);
1947 compr.compress(b"Hello, world!", &mut writer);
1948 compr.compress_end(&mut writer);
1949 let output = writer.end();
1950 assert_eq!(output.as_slice(), b"\x78\x01\x01\x0D\x00\xF2\xFFHello, world!\x20\x5E\x04\x8A");
// Round-trip helper shared by the mode tests: compress a repetitive text
// (good LZ77 fodder) with the given mode, then inflate and compare with the
// original.
1952 fn deflate_test(mode: DeflateMode) {
1954 b"The first day of Christmas,
1955 My true love sent to me
1956 A partridge in a pear tree.
1958 The second day of Christmas,
1959 My true love sent to me
1960 Two turtle doves, and
1961 A partridge in a pear tree.
1963 The third day of Christmas,
1964 My true love sent to me
1966 Two turtle doves, and
1967 A partridge in a pear tree.
1969 The fourth day of Christmas,
1970 My true love sent to me
1973 Two turtle doves, and
1974 A partridge in a pear tree.
1976 The fifth day of Christmas,
1977 My true love sent to me
1981 Two turtle doves, and
1982 A partridge in a pear tree.";
1983 let output = Vec::with_capacity(SRC.len() + 16);
1984 let mut writer = DeflateWriter::new(output);
1985 let mut compr = Deflate::new(mode);
1986 compr.write_zlib_header(&mut writer);
1987 compr.compress(SRC, &mut writer);
1988 compr.compress_end(&mut writer);
1989 let output = writer.end();
// Decompress what was just produced and verify a byte-exact round trip.
1990 let mut uncompr = vec![0u8; SRC.len()];
1991 Inflate::uncompress(&output, &mut uncompr).unwrap();
1992 assert_eq!(SRC, uncompr.as_slice());
// Round-trip the shared test text through each real compression mode.
1995 fn test_deflate_fast() {
1996 deflate_test(DeflateMode::Fast);
1999 fn test_deflate_better() {
2000 deflate_test(DeflateMode::Better);
2003 fn test_deflate_best() {
2004 deflate_test(DeflateMode::Best);