1 //! Deflate format (RFC 1951) support.
//! This module provides decompression of raw deflated streams (RFC 1951) via [`Inflate`] and of gzip files (RFC 1952) via [`gzip_decode`], as well as compression into raw or zlib streams via [`Deflate`].
5 //! [`Deflate`]: ./struct.Deflate.html
6 //! [`Inflate`]: ./struct.Inflate.html
7 //! [`gzip_decode`]: ./fn.gzip_decode.html
11 //! Decompressing full input buffer into sufficiently large output buffer:
13 //! # use nihav_core::compr::DecompressError;
14 //! use nihav_core::compr::deflate::Inflate;
16 //! # fn decompress(input: &[u8]) -> Result<(), DecompressError> {
17 //! # let mut output_buffer = [0u8; 16];
18 //! let output_length = Inflate::uncompress(input, &mut output_buffer)?;
23 //! Decompressing input chunks into portions of output:
25 //! use nihav_core::compr::DecompressError;
26 //! use nihav_core::compr::deflate::Inflate;
28 //! # fn decompress(input_data: &[u8]) -> Result<(), DecompressError> {
29 //! let mut inflate = Inflate::new();
30 //! let mut dst_buf: Vec<u8> = Vec::new();
31 //! let mut output_chunk = [0u8; 1024];
32 //! for src in input_data.chunks(512) {
33 //! let mut repeat = false;
35 //! let ret = inflate.decompress_data(src, &mut output_chunk, repeat);
37 //! Ok(len) => { // we got a buffer decoded successfully to the end
38 //! dst_buf.extend_from_slice(&output_chunk[..len]);
41 //! Err(DecompressError::ShortData) => { // this block of data was fully read
44 //! Err(DecompressError::OutputFull) => {
45 //! // the output buffer is full, flush it and continue decoding the same block
47 //! dst_buf.extend_from_slice(&output_chunk);
59 //! Compressing input buffer into zlib stream:
61 //! use nihav_core::compr::deflate::{Deflate, DeflateMode, DeflateWriter};
63 //! # fn compress(input: &[u8]) {
64 //! let output = Vec::with_capacity(input.len() * 65540 / 65535 + 6);
65 //! let mut writer = DeflateWriter::new(output);
66 //! let mut compr = Deflate::new(DeflateMode::Fast);
67 //! compr.write_zlib_header(&mut writer);
68 //! compr.compress(input, &mut writer);
69 //! compr.compress_end(&mut writer);
70 //! let output = writer.end();
74 use crate::io::byteio::*;
75 use crate::io::bitreader::*;
76 use crate::io::codebook::*;
/// Size of the literal/length code alphabet used for table building
/// (RFC 1951 defines codes 0..=287, of which 286/287 never occur in data).
const NUM_LITERALS: usize = 287;
/// Number of distance codes (RFC 1951, section 3.2.5).
const NUM_DISTS: usize = 32;

/// Codebook descriptor for the fixed Huffman literal/length code
/// from RFC 1951, section 3.2.6.
struct FixedLenCodeReader {}

impl CodebookDescReader<u16> for FixedLenCodeReader {
    // Fixed code lengths: 8 bits for 0..143, 9 for 144..255, 7 for 256..279, 8 for the rest.
    fn bits(&mut self, idx: usize) -> u8 {
        else if idx < 256 { 9 }
        else if idx < 280 { 7 }
    #[allow(clippy::identity_op)]
    fn code(&mut self, idx: usize) -> u32 {
        let base = idx as u32;
        let bits = self.bits(idx);
        // Deflate stores codes LSB-first, hence the bit reversal of the
        // canonical code values (0x30.., 0x190.., 0x000.., 0xC0.. bases).
        if idx < 144 { reverse_bits(base + 0x30, bits) }
        else if idx < 256 { reverse_bits(base + 0x190 - 144, bits) }
        else if idx < 280 { reverse_bits(base + 0x000 - 256, bits) }
        else { reverse_bits(base + 0xC0 - 280, bits) }
    fn sym (&mut self, idx: usize) -> u16 { idx as u16 }
    fn len(&mut self) -> usize { NUM_LITERALS + 1 }
// Saved bit-reader state; copied in and out so decoding can pause at a chunk
// boundary and resume on the next input chunk.
#[derive(Clone,Copy,Default)]
struct BitReaderState {

// LSB-first bit reader over the current input chunk, restartable from a
// previously saved `BitReaderState`.
struct CurrentSource<'a> {

impl<'a> CurrentSource<'a> {
    // Starts reading a fresh chunk with the given saved state.
    fn new(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
    // Resumes reading on a new chunk while keeping the cached bits.
    fn reinit(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
    // Tops up the 32-bit cache one byte at a time while at least 8 bits fit.
    fn refill(&mut self) {
        while (self.br.bits <= 24) && (self.br.pos < self.src.len()) {
            self.br.bitbuf |= u32::from(self.src[self.br.pos]) << self.br.bits;
    // Discards `nbits` already-examined bits from the cache.
    fn skip_cache(&mut self, nbits: u8) {
        self.br.bitbuf >>= nbits;
        self.br.bits -= nbits;
    // Reads up to 16 bits, refilling the cache on demand.
    fn read(&mut self, nbits: u8) -> BitReaderResult<u32> {
        if nbits == 0 { return Ok(0); }
        if nbits > 16 { return Err(BitReaderError::TooManyBitsRequested); }
        if self.br.bits < nbits {
        if self.br.bits < nbits { return Err(BitReaderError::BitstreamEnd); }
        let ret = self.br.bitbuf & ((1 << nbits) - 1);
        self.skip_cache(nbits);
    // Reads a single bit as a boolean.
    fn read_bool(&mut self) -> BitReaderResult<bool> {
        if self.br.bits == 0 {
        if self.br.bits == 0 { return Err(BitReaderError::BitstreamEnd); }
        let ret = (self.br.bitbuf & 1) != 0;
    // Peeks up to 16 bits without consuming them; returns 0 on an invalid request.
    fn peek(&mut self, nbits: u8) -> u32 {
        if nbits == 0 || nbits > 16 { return 0; }
        if self.br.bits < nbits {
        self.br.bitbuf & ((1 << nbits) - 1)
    // Skips `nbits` bits; the count may exceed what is currently cached.
    fn skip(&mut self, nbits: u32) -> BitReaderResult<()> {
        if u32::from(self.br.bits) >= nbits {
            self.skip_cache(nbits as u8);
    // Aligns the reader to the next byte boundary (drops 0..=7 bits).
    fn align(&mut self) {
        let b = self.br.bits & 7;
    // Number of bits still available in this chunk, cache included.
    fn left(&self) -> isize {
        ((self.src.len() as isize) - (self.br.pos as isize)) * 8 + (self.br.bits as isize)

impl<'a, S: Copy> CodebookReader<S> for CurrentSource<'a> {
    // Decodes one codeword by walking the codebook's multi-level lookup table.
    fn read_cb(&mut self, cb: &Codebook<S>) -> CodebookResult<S> {
        let mut lut_bits = cb.lut_bits;
        // Remember the state so the caller can rewind on a short read.
        let orig_br = self.br;
        let lut_idx = (self.peek(lut_bits) as usize) + (idx as usize);
        if cb.table[lut_idx] == TABLE_FILL_VALUE { return Err(CodebookError::InvalidCode); }
        let bits = cb.table[lut_idx] & 0x7F;
        esc = (cb.table[lut_idx] & 0x80) != 0;
        idx = (cb.table[lut_idx] >> 8) as usize;
        // On an escape entry only the LUT index bits are consumed before descending.
        let skip_bits = if esc { u32::from(lut_bits) } else { bits };
        if (skip_bits as isize) > self.left() {
            // Input exhausted mid-codeword: reported as MemoryError so the
            // inflate loop can save state and retry with more data.
            return Err(CodebookError::MemoryError);
        self.skip(skip_bits as u32).unwrap();
        lut_bits = bits as u8;
    // NOTE(review): these are variants of the inflate state machine enum whose
    // declaration is above this excerpt. Payloads carry the values already
    // decoded when the state was entered (length, distance base, extra bits).
    StaticBlockInvLen(u32),
    StaticBlockCopy(usize),
    FixedBlockLengthExt(usize, u8),
    FixedBlockDist(usize),
    FixedBlockDistExt(usize, usize, u8),
    FixedBlockCopy(usize, usize),
    FixedBlockLiteral(u8),
    DynCodeLengthsAdd(usize),
    DynBlockLengthExt(usize, u8),
    DynBlockDistExt(usize, usize, u8),
    DynCopy(usize, usize),
/// The decompressor for deflated streams (RFC 1951).
    // Codebook for the fixed literal/length code (RFC 1951, section 3.2.6).
    fix_len_cb: Codebook<u16>,
    // Per-dynamic-block codebooks: code-length code, literal/length code,
    // distance code; rebuilt for every block of type 2.
    dyn_len_cb: Option<Codebook<u32>>,
    dyn_lit_cb: Option<Codebook<u32>>,
    dyn_dist_cb: Option<Codebook<u32>>,
    // Scratch arrays while decoding the dynamic block header.
    len_lengths: [u8; 19],
    all_lengths: [u8; NUM_LITERALS + NUM_DISTS],
// Extra bits for length codes 257..=285 (RFC 1951, section 3.2.5).
const LENGTH_ADD_BITS: [u8; 29] = [
     0,  0,  0,  0,  0,  0,  0,  0,  1,  1,
     1,  1,  2,  2,  2,  2,  3,  3,  3,  3,
     4,  4,  4,  4,  5,  5,  5,  5,  0
// Base match lengths for length codes 257..=285.
const LENGTH_BASE: [u16; 29] = [
      3,   4,   5,   6,   7,   8,   9,  10,  11,  13,
     15,  17,  19,  23,  27,  31,  35,  43,  51,  59,
     67,  83,  99, 115, 131, 163, 195, 227, 258
// Extra bits for the 30 distance codes.
const DIST_ADD_BITS: [u8; 30] = [
     0,  0,  0,  0,  1,  1,  2,  2,  3,  3,
     4,  4,  5,  5,  6,  6,  7,  7,  8,  8,
     9,  9, 10, 10, 11, 11, 12, 12, 13, 13
// Base distances for the 30 distance codes.
const DIST_BASE: [u16; 30] = [
       1,    2,    3,    4,    5,    7,    9,   13,   17,   25,
      33,   49,   65,   97,  129,  193,  257,  385,  513,  769,
    1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
// Transmission order of the code-length code lengths (RFC 1951, section 3.2.7).
const LEN_RECODE: [usize; 19] = [
    16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
// Extra bits / base repeat counts for code-length symbols 16, 17 and 18.
const REPEAT_BITS: [u8; 3] = [ 2, 3, 7 ];
const REPEAT_BASE: [u8; 3] = [ 3, 3, 11 ];
// Reads `$bits` bits or bails out with ShortData, leaving the saved state
// intact so decoding can resume when more input arrives.
macro_rules! read_bits {
    ($self: expr, $csrc: expr, $bits: expr) => ({
        if $csrc.left() < $bits as isize {
            return Err(DecompressError::ShortData);
        $csrc.read($bits).unwrap()

// Decodes one codeword; a MemoryError from the codebook reader means the
// input ran out mid-codeword and is translated to ShortData, any other
// error terminates the stream as invalid data.
macro_rules! read_cb {
    ($self: expr, $csrc: expr, $cb: expr) => ({
        let ret = $csrc.read_cb($cb);
        if let Err(CodebookError::MemoryError) = ret {
            return Err(DecompressError::ShortData);
            $self.state = InflateState::End;
            return Err(DecompressError::InvalidData);
    /// Creates a new instance of `Inflate` struct.
    pub fn new() -> Self {
        let mut cr = FixedLenCodeReader {};
        // Building the fixed codebook cannot fail for the RFC-defined lengths.
        let fix_len_cb = Codebook::new(&mut cr, CodebookMode::LSB).unwrap();
            br: BitReaderState::default(),
            state: InflateState::Start,
            len_lengths: [0; 19],
            all_lengths: [0; NUM_LITERALS + NUM_DISTS],
    // Appends one byte to the history ring buffer (buffer length is a power of two).
    fn put_literal(&mut self, val: u8) {
        self.buf[self.bpos] = val;
        self.bpos = (self.bpos + 1) & (self.buf.len() - 1);
    // Copies `len` bytes from `offset` back in the history window into both
    // the ring buffer and `dst`; fails if the offset reaches before the start
    // of the decoded output.
    fn lz_copy(&mut self, offset: usize, len: usize, dst: &mut [u8]) -> DecompressResult<()> {
        let mask = self.buf.len() - 1;
        if offset > self.full_pos {
            return Err(DecompressError::InvalidData);
        let cstart = (self.bpos.wrapping_sub(offset)) & mask;
        // Byte-by-byte copy is required: source and destination ranges may overlap.
        self.buf[(self.bpos + i) & mask] = self.buf[(cstart + i) & mask];
        dst[i] = self.buf[(cstart + i) & mask];
        self.bpos = (self.bpos + len) & mask;
        self.full_pos += len;
    /// Reports whether decoder has finished decoding the input.
    pub fn is_finished(&self) -> bool {
        InflateState::End => true,
365 ///! Reports the current amount of bytes output into the destination buffer after the last run.
366 pub fn get_current_output_size(&self) -> usize { self.output_idx }
367 ///! Reports the total amount of bytes decoded so far.
368 pub fn get_total_output_size(&self) -> usize { self.bpos }
    /// Tries to decompress input data and write it to the output buffer.
    ///
    /// Since the decompressor can work with arbitrary input and output chunks its return value may have several meanings:
    /// * `Ok(len)` means the stream has been fully decoded and the number of bytes output into the destination buffer is returned.
    /// * [`DecompressError::ShortData`] means the input stream has been fully read but more data is needed.
    /// * [`DecompressError::OutputFull`] means the output buffer is full and should be flushed. Then decoding should continue on the same input block with `continue_block` parameter set to `true`.
    ///
    /// [`DecompressError::ShortData`]: ../enum.DecompressError.html#variant.ShortData
    /// [`DecompressError::OutputFull`]: ../enum.DecompressError.html#variant.OutputFull
    #[allow(clippy::comparison_chain)]
    pub fn decompress_data(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool) -> DecompressResult<usize> {
        if src.is_empty() || dst.is_empty() {
            return Err(DecompressError::InvalidArgument);
        // Either start on a fresh chunk or resume with the saved bit cache.
        let mut csrc = if !continue_block {
                CurrentSource::new(src, self.br)
                CurrentSource::reinit(src, self.br)
                // Block header: one bit marking the final block of the stream.
                InflateState::Start | InflateState::BlockStart => {
                    if csrc.left() == 0 {
                        return Err(DecompressError::ShortData);
                    self.final_block = csrc.read_bool().unwrap();
                    self.state = InflateState::BlockMode;
                // Two-bit block type: 0 = stored, 1 = fixed codes, 2 = dynamic codes.
                InflateState::BlockMode => {
                    let bmode = read_bits!(self, csrc, 2);
                            self.state = InflateState::StaticBlockLen;
                        1 => { self.state = InflateState::FixedBlock; },
                        2 => { self.state = InflateState::DynBlockHlit; },
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidHeader);
                // Stored block: 16-bit LEN followed by its one's complement NLEN.
                InflateState::StaticBlockLen => {
                    let len = read_bits!(self, csrc, 16);
                    self.state = InflateState::StaticBlockInvLen(len);
                InflateState::StaticBlockInvLen(len) => {
                    let inv_len = read_bits!(self, csrc, 16);
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    self.state = InflateState::StaticBlockCopy(len as usize);
                // Copies the stored block bytes, pausing if input runs out.
                InflateState::StaticBlockCopy(len) => {
                            self.state = InflateState::StaticBlockCopy(len - i);
                            return Err(DecompressError::ShortData);
                        let val = csrc.read(8).unwrap() as u8;
                        self.put_literal(val);
                    self.state = InflateState::BlockStart;
                // Block compressed with the fixed Huffman codes.
                InflateState::FixedBlock => {
                    let val = read_cb!(self, csrc, &self.fix_len_cb);
                        if self.output_idx >= dst.len() {
                            // Park the decoded literal so it is not lost across the flush.
                            self.state = InflateState::FixedBlockLiteral(val as u8);
                            return Err(DecompressError::OutputFull);
                        self.put_literal(val as u8);
                        dst[self.output_idx] = val as u8;
                        self.output_idx += 1;
                    } else if val == 256 {
                        // End-of-block symbol.
                        if self.final_block {
                            self.state = InflateState::End;
                            return Ok(self.output_idx);
                        self.state = InflateState::BlockStart;
                        // Length code: map to base length plus optional extra bits.
                        let len_idx = (val - 257) as usize;
                        if len_idx >= LENGTH_BASE.len() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        let len_bits = LENGTH_ADD_BITS[len_idx];
                        let add_base = LENGTH_BASE[len_idx] as usize;
                            self.state = InflateState::FixedBlockLengthExt(add_base, len_bits);
                            self.state = InflateState::FixedBlockDist(add_base);
                // Emits a literal that was decoded before the output filled up.
                InflateState::FixedBlockLiteral(sym) => {
                    if self.output_idx >= dst.len() {
                        return Err(DecompressError::OutputFull);
                    self.put_literal(sym);
                    dst[self.output_idx] = sym;
                    self.output_idx += 1;
                    self.state = InflateState::FixedBlock;
                InflateState::FixedBlockLengthExt(base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::FixedBlockDist(base + add);
                // Fixed distance codes are 5 bits, stored bit-reversed.
                InflateState::FixedBlockDist(length) => {
                    let dist_idx = reverse_bits(read_bits!(self, csrc, 5), 5) as usize;
                    if dist_idx >= DIST_BASE.len() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    let dist_bits = DIST_ADD_BITS[dist_idx];
                    let dist_base = DIST_BASE[dist_idx] as usize;
                        self.state = InflateState::FixedBlockCopy(length, dist_base);
                        self.state = InflateState::FixedBlockDistExt(length, dist_base, dist_bits);
                InflateState::FixedBlockDistExt(length, base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::FixedBlockCopy(length, base + add);
                // Performs the LZ77 copy, splitting it if the output buffer fills up.
                InflateState::FixedBlockCopy(length, dist) => {
                    if self.output_idx + length > dst.len() {
                        let copy_size = dst.len() - self.output_idx;
                        let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        self.output_idx += copy_size;
                        self.state = InflateState::FixedBlockCopy(length - copy_size, dist);
                        return Err(DecompressError::OutputFull);
                    let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    self.output_idx += length;
                    self.state = InflateState::FixedBlock;
                // Dynamic block header: HLIT, HDIST and HCLEN counts (RFC 1951, 3.2.7).
                InflateState::DynBlockHlit => {
                    self.hlit = (read_bits!(self, csrc, 5) as usize) + 257;
                    if self.hlit >= 287 {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    self.state = InflateState::DynBlockHdist;
                InflateState::DynBlockHdist => {
                    self.hdist = (read_bits!(self, csrc, 5) as usize) + 1;
                    self.state = InflateState::DynBlockHclen;
                InflateState::DynBlockHclen => {
                    let hclen = (read_bits!(self, csrc, 4) as usize) + 4;
                    self.cur_len_idx = 0;
                    self.len_lengths = [0; 19];
                    self.all_lengths = [0; NUM_LITERALS + NUM_DISTS];
                    self.state = InflateState::DynLengths(hclen);
                // Reads the 3-bit code lengths of the code-length code, in LEN_RECODE order.
                InflateState::DynLengths(len) => {
                            self.state = InflateState::DynLengths(len - i);
                            return Err(DecompressError::ShortData);
                        self.len_lengths[LEN_RECODE[self.cur_len_idx]] = csrc.read(3).unwrap() as u8;
                        self.cur_len_idx += 1;
                    let mut len_codes = [ShortCodebookDesc { code: 0, bits: 0 }; 19];
                    lengths_to_codes(&self.len_lengths, &mut len_codes)?;
                    let mut cr = ShortCodebookDescReader::new(len_codes.to_vec());
                    let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    self.dyn_len_cb = Some(ret.unwrap());
                    self.cur_len_idx = 0;
                    self.state = InflateState::DynCodeLengths;
                // Decodes the literal/length and distance code lengths using the
                // code-length code, then builds both codebooks.
                InflateState::DynCodeLengths => {
                    if let Some(ref len_cb) = self.dyn_len_cb {
                        while self.cur_len_idx < self.hlit + self.hdist {
                            let ret = csrc.read_cb(len_cb);
                            let val = match ret {
                                    Err(CodebookError::MemoryError) => {
                                        return Err(DecompressError::ShortData);
                                    self.state = InflateState::End;
                                    return Err(DecompressError::InvalidHeader);
                                self.all_lengths[self.cur_len_idx] = val as u8;
                                self.cur_len_idx += 1;
                                // Symbols 16..18 encode repeats; handled in DynCodeLengthsAdd.
                                let idx = (val as usize) - 16;
                                    self.state = InflateState::End;
                                    return Err(DecompressError::InvalidHeader);
                                self.state = InflateState::DynCodeLengthsAdd(idx);
                        let (lit_lengths, dist_lengths) = self.all_lengths.split_at(self.hlit);
                        let mut lit_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_LITERALS];
                        lengths_to_codes(&lit_lengths, &mut lit_codes)?;
                        let mut cr = ShortCodebookDescReader::new(lit_codes.to_vec());
                        let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        if ret.is_err() { return Err(DecompressError::InvalidHeader); }
                        self.dyn_lit_cb = Some(ret.unwrap());
                        let mut dist_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_DISTS];
                        lengths_to_codes(&dist_lengths[..self.hdist], &mut dist_codes)?;
                        let mut cr = ShortCodebookDescReader::new(dist_codes.to_vec());
                        let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        if ret.is_err() { return Err(DecompressError::InvalidHeader); }
                        self.dyn_dist_cb = Some(ret.unwrap());
                        self.state = InflateState::DynBlock;
                // Expands a repeat symbol: 16 copies the previous length, 17/18 write zeros.
                InflateState::DynCodeLengthsAdd(mode) => {
                    let base = REPEAT_BASE[mode] as usize;
                    let bits = REPEAT_BITS[mode];
                    let len = base + read_bits!(self, csrc, bits) as usize;
                    if self.cur_len_idx + len > self.hlit + self.hdist {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    let rpt = if mode == 0 {
                        // "Repeat previous" with no previous length is invalid.
                        if self.cur_len_idx == 0 {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidHeader);
                        self.all_lengths[self.cur_len_idx - 1]
                        self.all_lengths[self.cur_len_idx] = rpt;
                        self.cur_len_idx += 1;
                    self.state = InflateState::DynCodeLengths;
                // Block compressed with the per-block dynamic Huffman codes;
                // mirrors the FixedBlock logic with the dynamic codebooks.
                InflateState::DynBlock => {
                    if let Some(ref lit_cb) = self.dyn_lit_cb {
                        let val = read_cb!(self, csrc, lit_cb);
                            if self.output_idx >= dst.len() {
                                self.state = InflateState::DynBlockLiteral(val as u8);
                                return Err(DecompressError::OutputFull);
                            self.put_literal(val as u8);
                            dst[self.output_idx] = val as u8;
                            self.output_idx += 1;
                        } else if val == 256 {
                            if self.final_block {
                                self.state = InflateState::End;
                                return Ok(self.output_idx);
                            self.state = InflateState::BlockStart;
                            let len_idx = (val - 257) as usize;
                            if len_idx >= LENGTH_BASE.len() {
                                self.state = InflateState::End;
                                return Err(DecompressError::InvalidData);
                            let len_bits = LENGTH_ADD_BITS[len_idx];
                            let add_base = LENGTH_BASE[len_idx] as usize;
                                self.state = InflateState::DynBlockLengthExt(add_base, len_bits);
                                self.state = InflateState::DynBlockDist(add_base);
                InflateState::DynBlockLiteral(sym) => {
                    if self.output_idx >= dst.len() {
                        return Err(DecompressError::OutputFull);
                    self.put_literal(sym);
                    dst[self.output_idx] = sym;
                    self.output_idx += 1;
                    self.state = InflateState::DynBlock;
                InflateState::DynBlockLengthExt(base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::DynBlockDist(base + add);
                InflateState::DynBlockDist(length) => {
                    if let Some(ref dist_cb) = self.dyn_dist_cb {
                        let dist_idx = read_cb!(self, csrc, dist_cb) as usize;
                        if dist_idx >= DIST_BASE.len() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        let dist_bits = DIST_ADD_BITS[dist_idx];
                        let dist_base = DIST_BASE[dist_idx] as usize;
                            self.state = InflateState::DynCopy(length, dist_base);
                            self.state = InflateState::DynBlockDistExt(length, dist_base, dist_bits);
                InflateState::DynBlockDistExt(length, base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::DynCopy(length, base + add);
                InflateState::DynCopy(length, dist) => {
                    if self.output_idx + length > dst.len() {
                        let copy_size = dst.len() - self.output_idx;
                        let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        self.output_idx += copy_size;
                        self.state = InflateState::DynCopy(length - copy_size, dist);
                        return Err(DecompressError::OutputFull);
                    let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    self.output_idx += length;
                    self.state = InflateState::DynBlock;
                InflateState::End => {
    /// Decompresses input data into output returning the uncompressed data length.
    pub fn uncompress(src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
        let mut inflate = Self::new();
        // Skip a zlib (RFC 1950) header when present: CMF byte 0x78 and a FLG
        // byte making the 16-bit header divisible by 31 (for CMF 0x78 that
        // means FLG ≡ 1 mod 31, hence the (src[1] - 1) % 31 test).
        let off = if src.len() > 2 && src[0] == 0x78 && (src[1] != 0 && ((src[1] - 1) % 31) == 0) { 2 } else { 0 };
        inflate.decompress_data(&src[off..], dst, false)
impl Default for Inflate {
    // Equivalent to `Inflate::new()`.
    fn default() -> Self {
// Builds canonical Huffman codes (bit-reversed for LSB-first reading) from a
// list of code lengths; a zero length marks an unused symbol.
fn lengths_to_codes(lens: &[u8], codes: &mut [ShortCodebookDesc]) -> DecompressResult<()> {
    let mut bits = [0u32; 32];
    let mut pfx = [0u32; 33];
    // Count how many codes exist for each length; lengths >= 32 are invalid.
    for len in lens.iter() {
        let len = *len as usize;
        if len >= bits.len() {
            return Err(DecompressError::InvalidHeader);
    // Compute the canonical starting code for every length.
    for i in 0..bits.len() {
        code = (code + bits[i]) << 1;
    // Assign consecutive (bit-reversed) codes per length.
    for (len, codes) in lens.iter().zip(codes.iter_mut()) {
        let len = *len as usize;
        let bits = len as u8;
        *codes = ShortCodebookDesc { code: reverse_bits(pfx[len], bits), bits };
        *codes = ShortCodebookDesc { code: 0, bits: 0 };
// CRC-32 (reflected polynomial 0xEDB88320, as used by gzip) with a
// lazily built 256-entry lookup table.
#[allow(clippy::unreadable_literal)]
    let mut tab = [0u32; 256];
        let mut c = i as u32;
            c = 0xEDB88320 ^ (c >> 1);
    // Feeds a buffer into the running CRC (stored pre-inverted in self.crc).
    fn update_crc(&mut self, src: &[u8]) {
        let mut c = !self.crc;
        for el in src.iter() {
            c = self.tab[((c ^ u32::from(*el)) & 0xFF) as usize] ^ (c >> 8);
/// Decodes input data in gzip file format (RFC 1952) returning a vector containing decoded data.
pub fn gzip_decode(br: &mut ByteReader, skip_crc: bool) -> DecompressResult<Vec<u8>> {
    const FLAG_HCRC: u8 = 0x02;
    const FLAG_EXTRA: u8 = 0x04;
    const FLAG_NAME: u8 = 0x08;
    const FLAG_COMMENT: u8 = 0x10;

    // Fixed header: magic 0x1F 0x8B, compression method 8 (deflate).
    let id1 = br.read_byte()?;
    let id2 = br.read_byte()?;
    let cm = br.read_byte()?;
    let flg = br.read_byte()?;
    let _mtime = br.read_u32le()?;
    let _xfl = br.read_byte()?;
    let _os = br.read_byte()?;
    if id1 != 0x1F || id2 != 0x8B || cm != 8 {
        return Err(DecompressError::InvalidHeader);
    // Skip the optional extra field, file name and comment.
    if (flg & FLAG_EXTRA) != 0 {
        let xlen = br.read_u16le()? as usize;
    if (flg & FLAG_NAME) != 0 {
            let b = br.read_byte()?;
    if (flg & FLAG_COMMENT) != 0 {
            let b = br.read_byte()?;
    let _hcrc = if (flg & FLAG_HCRC) != 0 {
    // Reserved flag bits must be zero.
    if (flg & 0xE0) != 0 {
        return Err(DecompressError::Unsupported);

    let mut output: Vec<u8> = Vec::new();
    let mut tail = [0u8; 8];
    let mut inblk = [0u8; 1024];
    let mut oblk = [0u8; 4096];
    let mut inflate = Inflate::new();
    let mut checker = GzipCRC32::new();

        // Pump input chunks through the streaming inflater.
        let ret = br.read_buf_some(&mut inblk);
        if let Err(ByteIOError::EOF) = ret {
        let inlen = match ret {
                Err(_) => return Err(DecompressError::IOError),
        let mut repeat = false;
            let ret = inflate.decompress_data(&inblk[..inlen], &mut oblk, repeat);
                    checker.update_crc(&oblk[..outlen]);
                    output.extend_from_slice(&oblk[..outlen]);
                Err(DecompressError::ShortData) => {
                Err(DecompressError::OutputFull) => {
                    checker.update_crc(&oblk);
                    output.extend_from_slice(&oblk);
        // Save last 8 bytes for CRC and size.
            tail.copy_from_slice(&inblk[inlen - 8..][..8]);
            let shift_len = 8 - inlen;
            for i in 0..shift_len {
                tail[i] = tail[i + inlen];
            for i in shift_len..8 {
                tail[i] = inblk[i - shift_len];
    // NOTE(review): leftover debug print in library code — should be removed
    // or replaced with a proper error/log once the full function is in view.
    if !inflate.is_finished() { println!("???"); }
    let crc = read_u32le(&tail[0..4])?;
    let size = read_u32le(&tail[4..8])?;
    // The gzip trailer stores ISIZE (uncompressed size mod 2^32) and CRC-32.
    if size != (output.len() as u32) {
        return Err(DecompressError::CRCError);
    if crc != checker.crc {
        return Err(DecompressError::CRCError);
// A single LZ77 token: either a literal or a (length, distance) match,
// stored together with the deflate symbols that encode it.
#[derive(Clone,Copy,Default)]

// End-of-block marker (symbol 256).
const TOKEN_EOB: Token = Token { sym: 256, distsym: 0, len: 0, dist: 0 };

    // Wraps a literal byte as a token (symbol = byte value).
    fn from_literal(sym: u8) -> Self {
    // Builds a match token, mapping length and distance onto the deflate
    // length codes 257..=284 and distance codes (RFC 1951, section 3.2.5).
    fn from_match(dist: u16, len: u16) -> Self {
        let sym = match len {
              3..= 10 => 257 + len - 3,
             11..= 18 => 265 + (len - 11) / 2,
             19..= 34 => 269 + (len - 19) / 4,
             35..= 66 => 273 + (len - 35) / 8,
             67..=130 => 277 + (len - 67) / 16,
            131..=257 => 281 + (len - 131) / 32,
        // Distances 1..=4 map directly; larger ones use log2 bucketing with
        // one extra bit distinguishing the two codes per bucket.
        let distsym = if dist <= 4 {
            let bits = 16 - (dist - 1).leading_zeros();
            (bits as u8) * 2 - 2 + if ((dist - 1) & (1 << (bits - 2))) != 0 { 1 } else { 0 }
            sym, distsym, len, dist
// Run-length encodes a sequence of code lengths into code-length-code tokens
// (16 = repeat previous, 17/18 = runs of zeros), updating symbol statistics.
fn add_codes(lens: &[u8], stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
    for &len in lens.iter() {
            // Long zero run: symbol 18 covers 11..=138 zeros.
            let run = lcount.min(138);
                toks.push((18, run - 11));
                // Shorter zero run: symbol 17 covers 3..=10 zeros.
                toks.push((17, lcount - 3));
            // Repeat of the previous non-zero length: symbol 16, 3..=6 times.
            let run = lcount.min(6);
                toks.push((16, run - 3));
            stats[last as usize] += 1;
            toks.push((last, 0));
            stats[len as usize] += 1;
            toks.push((len, 0));
    // Flush whatever run is still pending at the end of the list.
        let run = lcount.min(138);
            toks.push((18, run - 11));
            toks.push((17, lcount - 3));
            for _ in 0..lcount {
        let run = lcount.min(6);
            toks.push((16, run - 3));
        for _ in 0..lcount {
            stats[last as usize] += 1;
            toks.push((last, 0));
/// Deflate stream writer.
pub struct DeflateWriter {

impl DeflateWriter {
    /// Creates a new instance of `DeflateWriter` for a provided output.
    pub fn new(dst: Vec<u8>) -> Self {
    // Pads the bit buffer up to the next byte boundary.
    fn align(&mut self) {
        if (self.bits & 7) != 0 {
            self.bits += 8 - (self.bits & 7);
    // Pushes full bytes from the bit buffer to the output vector.
    fn flush(&mut self) {
        while self.bits >= 8 {
            self.dst.push(self.bbuf as u8);
    // Appends `len` bits (LSB-first) to the bit buffer.
    fn write(&mut self, val: u16, len: u8) {
        self.bbuf |= u32::from(val) << self.bits;
    /// Finishes writing the stream and returns the output vector.
    pub fn end(mut self) -> Vec<u8> {
        self.dst.push(self.bbuf as u8);
    // Writes the dynamic block code description: code-length code lengths in
    // LEN_RECODE order followed by the RLE-encoded literal and distance lengths.
    fn write_codes(&mut self, codes: &CodeHuff, dists: &DistHuff) {
        let mut stats = [0u32; 19];
        let mut toks = Vec::with_capacity(NUM_LITERALS + NUM_DISTS);
        let mut cw = [0u16; 19];
        let mut cl = [0u8; 19];

        add_codes(&codes.lens[..codes.num_codes], &mut stats, &mut toks);
        add_codes(&dists.lens[..dists.num_codes], &mut stats, &mut toks);

        // Code-length code is limited to 7 bits per RFC 1951.
        gen_tree(&mut cw, &mut cl, &mut nc, &mut stats, 7);

        for &idx in LEN_RECODE.iter().rev() {

        self.write((nc - 4) as u16, 4);
        for &idx in LEN_RECODE.iter().take(nc) {
            self.write(u16::from(cl[idx]), 3);
        for &(sym, add) in toks.iter() {
            self.write(cw[sym as usize], cl[sym as usize]);
                // Repeat symbols carry 2/3/7 extra bits respectively.
                16 => self.write(u16::from(add), 2),
                17 => self.write(u16::from(add), 3),
                18 => self.write(u16::from(add), 7),
    // Writes the token stream using the given literal/length and distance codes.
    fn write_tokens(&mut self, src: &[Token], codes: &CodeHuff, dists: &DistHuff) {
        for &tok in src.iter() {
            self.write(codes.codes[tok.sym as usize], codes.lens[tok.sym as usize]);
                self.write_len_bits(tok.len);
                self.write(dists.codes[tok.distsym as usize], dists.lens[tok.distsym as usize]);
                self.write_dist_bits(tok.dist);
    // Writes the extra bits of a match length (lengths 3..=10 and 258 have none).
    fn write_len_bits(&mut self, len: u16) {
        if llen >= 8 && llen < 255 {
            let bits = (16 - llen.leading_zeros() - 3) as u8;
            self.write(llen & ((1 << bits) - 1), bits);
    // Writes the extra bits of a match distance.
    fn write_dist_bits(&mut self, dist: u16) {
        let ddist = dist - 1;
        let bits = (16 - ddist.leading_zeros() - 2) as u8;
        self.write(ddist & ((1 << bits) - 1), bits);
    // Symbol frequencies, assigned codewords and code lengths for the
    // literal/length alphabet.
    stats: [u32; NUM_LITERALS],
    codes: [u16; NUM_LITERALS],
    lens: [u8; NUM_LITERALS],

    fn new(is_fixed: bool) -> Self {
            stats: [0; NUM_LITERALS],
            codes: [0; NUM_LITERALS],
            lens: [0; NUM_LITERALS],
            num_codes: NUM_LITERALS,
    // Either fills in the fixed RFC 1951 literal/length code or builds a
    // Huffman code from the token statistics.
    fn make_codes(&mut self, src: &[Token]) {
            // Fixed code: 8-bit codes 0x30.. for 0..=143.
            self.codes[i] = reverse_bits((i + 0x30) as u32, 8) as u16;
            // 9-bit codes 0x190.. for 144..=255.
            for i in 144..=255 {
                self.codes[i] = reverse_bits((i + 0x100) as u32, 9) as u16;
            // 7-bit codes for 256..=279.
            for i in 256..=279 {
                self.codes[i] = reverse_bits((i & 0x1F) as u32, 7) as u16;
            // 8-bit codes 0xC0.. for the remaining symbols.
            for i in 280..NUM_LITERALS {
                self.codes[i] = reverse_bits((i - 280 + 0xC0) as u32, 8) as u16;
            for &tok in src.iter() {
                self.stats[tok.sym as usize] += 1;
            gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
            // At least the end-of-block symbol (256) must be present.
            if self.num_codes < 257 {
                self.num_codes = 257;

    // Same bookkeeping for the 30-entry distance alphabet.
    stats: [u32; NUM_DISTS],
    codes: [u16; NUM_DISTS],
    lens: [u8; NUM_DISTS],

    fn new(is_fixed: bool) -> Self {
            stats: [0; NUM_DISTS],
            codes: [0; NUM_DISTS],
            lens: [0; NUM_DISTS],
            num_codes: NUM_DISTS,
    fn make_codes(&mut self, src: &[Token]) {
            // Fixed distance code: plain 5-bit codes, bit-reversed.
            for i in 0..NUM_DISTS {
                self.codes[i] = reverse_bits(i as u32, 5) as u16;
            for &tok in src.iter() {
                self.stats[tok.distsym as usize] += 1;
            gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
            if self.num_codes < 1 {
// Huffman tree node; internal nodes are marked with sym == NODE_SYM and the
// `w` field is reused to hold the assigned code length after build().
#[derive(Clone,Copy,Default)]

const NODE_SYM: u16 = 65500;

    nodes: [Node; NUM_LITERALS * 2],

        nodes: [Node::default(); NUM_LITERALS * 2],

    // Inserts a node keeping the array sorted (by weight) — insertion sort step.
    fn insert(&mut self, val: Node) {
        let mut idx = self.nnodes;
        for (i, nn) in self.nodes[..self.nnodes].iter().enumerate() {
        if idx < self.nnodes {
            for i in (idx..self.nnodes).rev() {
                self.nodes[i + 1] = self.nodes[i];
        self.nodes[idx] = val;
    // Drops leading zero-weight (unused) symbols from the node list.
    fn trim(&mut self) {
        let mut start = self.nnodes;
        for (i, n) in self.nodes[..self.nnodes].iter().enumerate() {
        for i in 0..self.nnodes - start {
            self.nodes[i] = self.nodes[i + start];
        self.nnodes -= start;
    // Standard Huffman construction: repeatedly merge the two lightest nodes,
    // then assign code lengths by walking the tree from the root.
    fn build(&mut self) {
        if self.nnodes == 1 {
            self.nodes[0].w = 1;
        while start + 1 < self.nnodes {
            let nn1 = self.nodes[start];
            let nn2 = self.nodes[start + 1];
                idx1: (start + 1) as u16,
            self.nodes[start].w = 0;
            self.nodes[start + 1].w = 0;
        if self.nnodes > 1 {
            self.assign_len(self.nnodes - 1, 0);
    // Recursively stores the depth of each leaf as its code length (in `w`).
    fn assign_len(&mut self, idx: usize, len: u16) {
        if self.nodes[idx].sym == NODE_SYM {
            self.assign_len(self.nodes[idx].idx0 as usize, len + 1);
            self.assign_len(self.nodes[idx].idx1 as usize, len + 1);
            self.nodes[idx].w = len;
// Builds a length-limited Huffman code for the given symbol statistics,
// producing (bit-reversed) codewords, code lengths and the used symbol count.
fn gen_tree(codes: &mut [u16], lens: &mut [u8], num_codes: &mut usize, stats: &mut [u32], max_bits: u8) {
    for &w in stats.iter() {
    // Crude length limiting: halve all weights until the total fits max_bits.
    while tot_w > (1 << max_bits) {
        for w in stats.iter_mut() {
    for &w in stats.iter() {
    let mut tree = Tree::new();
    for (sym, &w) in stats.iter().enumerate() {
        tree.insert(Node{ sym: sym as u16, w: w as u16, idx0: 64000, idx1: 64000 });
    // Collect the code length assigned to each leaf.
    for n in tree.nodes[..tree.nnodes].iter() {
        if n.sym != NODE_SYM {
            lens[n.sym as usize] = n.w as u8;
    lengths_to_codes16(lens, codes);
    // Trim trailing unused symbols from the reported code count.
    let mut sz = codes.len();
    for &len in lens.iter().rev() {

// Canonical code assignment identical to `lengths_to_codes` but writing
// plain u16 codewords (used by the encoder side).
fn lengths_to_codes16(lens: &[u8], codes: &mut [u16]) {
    let mut bits = [0u32; 32];
    let mut pfx = [0u32; 33];
    for len in lens.iter() {
        let len = *len as usize;
    for i in 0..bits.len() {
        code = (code + bits[i]) << 1;
    for (len, codes) in lens.iter().zip(codes.iter_mut()) {
        let len = *len as usize;
        let bits = len as u8;
        *codes = reverse_bits(pfx[len], bits) as u16;
    // Turns raw input bytes into a stream of LZ77 tokens ending with TOKEN_EOB.
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>);

// Parser that performs no matching: every byte becomes a literal token.
impl LZParse for NoParser {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        dst.reserve(src.len());
        for &b in src.iter() {
            dst.push(Token::from_literal(b));
        dst.push(TOKEN_EOB);

// Returns the length of the common prefix of the two slices
// (used to measure a candidate match).
fn check_match(src1: &[u8], src2: &[u8]) -> u16 {
    for (&a, &b) in src1.iter().zip(src2.iter()) {
// Number of buckets in the 3-byte-hash chain table (power of two).
const HASH_SIZE: usize = 4096;
// Longest match deflate can encode (RFC 1951).
const MAX_MATCH_LEN: usize = 258;
// Effective search window, shrunk so a match never crosses the 32 KiB limit.
const WINDOW_SIZE: usize = 32768 - MAX_MATCH_LEN;
// Sentinel chain index meaning "no next entry".
const NONEXT: usize = WINDOW_SIZE * 2;
// Hash-chain based match finder over the input window.
struct MatchFinder<'a> {
    // Per-bucket head/tail indices plus forward links forming the chains.
    hstart: [usize; HASH_SIZE],
    hend: [usize; HASH_SIZE],
    hnext: [usize; WINDOW_SIZE * 3],

impl<'a> MatchFinder<'a> {
    fn new(src: &'a [u8]) -> Self {
        let mut obj = Self {
            hstart: [0; HASH_SIZE],
            hend: [0; HASH_SIZE],
            hnext: [0; WINDOW_SIZE * 3],
    // Hashes the next three bytes into a bucket index.
    fn hash(src: &[u8]) -> usize {
        (((u16::from(src[0]) << 10) ^ (u16::from(src[1]) << 5) ^ u16::from(src[2])) & ((HASH_SIZE as u16) - 1)) as usize
    // (Re)builds all hash chains for the current window, iterating backwards
    // so chain heads point at the closest (most recent) positions first.
    fn build_hash(&mut self) {
        for el in self.hstart.iter_mut() { *el = NONEXT; }
        for el in self.hend.iter_mut() { *el = NONEXT; }
        for el in self.hnext.iter_mut() { *el = NONEXT; }
        if self.pos + 3 >= self.src.len() {
        let end = (self.src.len() - 3).min(self.pos + NONEXT);
        for i in (self.pos .. end).rev() {
            let key = Self::hash(&self.src[i..]);
            if self.hstart[key] == NONEXT {
                self.hstart[key] = i;
                self.hnext[key] = NONEXT;
                self.hnext[self.hend[key]] = i;
    // Returns the best (distance, length) match at the current position,
    // or zeros when no match is possible.
    fn find_match(&mut self) -> (u16, u16) {
        if self.pos == 0 || self.pos + 3 > self.src.len() {
        let key = Self::hash(&self.src[self.pos..]) as usize;
        let mut best_pos = 0;
        let mut best_len = 0;
        let mut idx = self.hstart[key];
        while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
            let cur_len = check_match(&self.src[self.pos..], &self.src[idx..]);
            if cur_len > best_len {
                best_pos = self.pos - idx;
                // Cannot do better than the maximum encodable length.
                if best_len >= (MAX_MATCH_LEN as u16) {
                    return (best_pos as u16, MAX_MATCH_LEN as u16);
            idx = self.hnext[idx];
        (best_pos as u16, best_len)
    // Records, for every match length, the nearest distance achieving it.
    fn find_all_matches(&mut self, dists: &mut [u16; MAX_MATCH_LEN + 1]) {
        if self.pos == 0 || self.pos + 3 > self.src.len() {
        let key = Self::hash(&self.src[self.pos..]) as usize;
        let mut idx = self.hstart[key];
        while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
            let cur_len = (check_match(&self.src[self.pos..], &self.src[idx..]) as usize).min(MAX_MATCH_LEN);
            if cur_len > 0 && dists[cur_len] == 0 {
                dists[cur_len] = (self.pos - idx) as u16;
            idx = self.hnext[idx];
    // Moves the current position forward, sliding the window and pruning or
    // rebuilding the hash chains when the position runs far ahead.
    fn advance(&mut self, num: usize) {
        if self.pos >= NONEXT {
            let (_, tail) = self.src.split_at(self.pos - WINDOW_SIZE);
            self.pos = WINDOW_SIZE;
            // Drop chain entries that fell out of the search window.
            for (start, end) in self.hstart.iter_mut().zip(self.hend.iter_mut()) {
                let mut idx = *start;
                while idx != NONEXT && idx + WINDOW_SIZE < self.pos {
                    idx = self.hnext[idx];
    fn get_sym(&self) -> u8 { self.src[self.pos] }
    fn is_end(&self) -> bool { self.pos >= self.src.len() }
1528 struct GreedyParser {}
// Greedy matching: at each position take the longest match found and jump
// past it; fall back to a literal when no match is available.  (The branch
// lines between the statements below are not visible in this listing.)
1529 impl LZParse for GreedyParser {
1530 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1531 dst.reserve(src.len());
1533 let mut matcher = MatchFinder::new(src);
1534 while !matcher.is_end() {
1535 let (best_pos, best_len) = matcher.find_match();
1538 dst.push(Token::from_match(best_pos, best_len));
1539 matcher.advance(best_len as usize);
1541 dst.push(Token::from_literal(matcher.get_sym()));
1545 dst.push(TOKEN_EOB);
1549 struct LazyParser {}
// Lazy matching: before committing to the match found at the current
// position, probe the next position; if it yields a match longer by more
// than one byte, emit a literal plus the later match instead.
1550 impl LZParse for LazyParser {
1551 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1552 dst.reserve(src.len());
1554 let mut matcher = MatchFinder::new(src);
1555 while !matcher.is_end() {
1556 let (best_pos, best_len) = matcher.find_match();
// Remember the literal under the cursor before probing the next position
// (the intervening advance line is not visible in this listing).
1558 let last_sym = matcher.get_sym();
1560 if !matcher.is_end() {
1561 let (best_pos1, best_len1) = matcher.find_match();
// The later match must beat the current one by more than the literal it
// costs us to delay.
1562 if best_len1 > best_len + 1 {
1563 dst.push(Token::from_literal(last_sym));
1564 dst.push(Token::from_match(best_pos1, best_len1));
1565 matcher.advance(best_len1 as usize);
// Keep the original match; advance by best_len - 1, presumably because the
// matcher already moved one byte past the match start for the probe.
1569 dst.push(Token::from_match(best_pos, best_len));
1570 matcher.advance((best_len - 1) as usize);
// No usable match: emit the current byte as a literal.
1572 dst.push(Token::from_literal(matcher.get_sym()));
1576 dst.push(TOKEN_EOB);
1580 #[derive(Clone,Copy)]
// NOTE(review): the `struct TNode` body itself (the price/dist/link fields
// used by OptimalParser) is not visible in this listing.
// Default is an unvisited trellis node: maximal price so that any real path
// relaxes it during the forward pass.
1587 impl Default for TNode {
1588 fn default() -> Self {
1590 price: std::u32::MAX,
// Exhaustive parser: builds a price trellis over the input and extracts the
// cheapest tokenization.
1597 struct OptimalParser {
// Scratch trellis, one node per input position; truncated and refilled for
// each parsed block.
1598 trellis: Vec<TNode>,
1600 impl OptimalParser {
1601 fn new() -> Self { Self::default() }
// Static cost estimate for encoding a literal: about 9 bits regardless of
// the symbol value.
1602 fn sym_price(_sym: u8) -> u32 { 9 }
// Static cost estimate for encoding a match, derived from the number of
// significant bits in (dist - 1); the remaining lines of the formula are
// not visible in this listing.
1603 fn match_price(dist: u16, _len: u16) -> u32 {
1607 let bits = 16 - (dist - 1).leading_zeros();
1612 impl Default for OptimalParser {
1613 fn default() -> Self {
// Preallocate a window's worth of trellis nodes to avoid regrowth.
1615 trellis: Vec::with_capacity(WINDOW_SIZE),
// Optimal (shortest-path) parsing: one trellis node per input position,
// literal and match edges relaxed forward with bit-price estimates, then
// the cheapest token sequence is reconstructed.
1619 impl LZParse for OptimalParser {
1620 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
// Early path for empty input (the surrounding if/return lines are not
// visible in this listing): just terminate the block.
1622 dst.push(TOKEN_EOB);
1625 dst.reserve(src.len());
// One node per position plus a terminal node; start node costs nothing.
1627 self.trellis.truncate(0);
1628 self.trellis.reserve(src.len() + 1);
1629 for _ in 0..=src.len() {
1630 self.trellis.push(TNode::default());
1632 self.trellis[0].price = 0;
// Forward pass: relax the literal edge i -> i+1 and every match edge
// i -> i+len found at position i.
1634 let mut matcher = MatchFinder::new(src);
1635 for i in 0..self.trellis.len() - 1 {
1636 let mut dists = [0; MAX_MATCH_LEN + 1];
1637 matcher.find_all_matches(&mut dists);
1639 let sym = matcher.get_sym();
1640 let lprice = Self::sym_price(sym) + self.trellis[i].price;
1641 if self.trellis[i + 1].price > lprice {
1642 self.trellis[i + 1].price = lprice;
1643 self.trellis[i + 1].link = i;
1645 for (len, &dist) in dists.iter().enumerate() {
1647 let mprice = Self::match_price(dist, len as u16) + self.trellis[i].price;
1648 if self.trellis[i + len].price > mprice {
1649 self.trellis[i + len].price = mprice;
1650 self.trellis[i + len].link = i;
// The winning distance is remembered on the edge's source node.
1651 self.trellis[i].dist = dist;
// Backward pass: `link` fields point backwards after the forward pass;
// reverse them in place so the chain can be walked front to back (the loop
// header and `oidx` bookkeeping lines are not visible in this listing).
1658 let mut idx = self.trellis.len() - 1;
1659 let mut nidx = self.trellis[idx].link;
1663 nidx = self.trellis[idx].link;
1664 self.trellis[idx].link = oidx;
// Emission pass: a step of length 1 is a literal, anything longer is a
// match using the distance stored on the step's start node.
1668 while idx < self.trellis.len() - 1 {
1669 let len = self.trellis[idx].link - idx;
1671 dst.push(Token::from_literal(src[idx]));
1673 dst.push(Token::from_match(self.trellis[idx].dist, len as u16));
1675 idx = self.trellis[idx].link;
1678 dst.push(TOKEN_EOB);
1682 /// Deflate compression mode.
1683 #[derive(Clone,Copy,Debug,PartialEq)]
1684 pub enum DeflateMode {
1685 /// No compression.
1687 /// Fast compression.
1689 /// Still fast but better compression.
1691 /// Slow but the best compression.
// NOTE(review): the variant names and the internal `Mode` enum body are not
// visible in this listing; `Mode` (with Copy/Fixed/Dynamic arms used below)
// selects the deflate block type.
1695 #[derive(Clone,Copy,Debug,PartialEq)]
// Largest amount of data gathered into one block: stored blocks carry a
// 16-bit LEN field, so 65535 is the maximum representable size.
1702 const MAX_BLOCK_SIZE: usize = 65535;
1704 /// Deflate stream compressor.
1705 pub struct Deflate {
// Input staging buffer: bytes accumulate here until a full block
// (MAX_BLOCK_SIZE) is ready or the stream is finished.
1708 srcbuf: [u8; MAX_BLOCK_SIZE],
// LZ77 parser selected from the requested DeflateMode.
1713 parser: Box<dyn LZParse>,
1717 /// Creates a new instance of `Deflate`.
1718 pub fn new(mode: DeflateMode) -> Self {
// Map the public mode onto an internal block type plus a matching parser:
// stored blocks need no matching, fixed Huffman pairs with greedy search,
// dynamic Huffman with lazy or optimal search.
1719 let (mode, parser) = match mode {
1720 DeflateMode::NoCompr => (Mode::Copy, Box::new(NoParser{}) as Box<dyn LZParse>),
1721 DeflateMode::Fast => (Mode::Fixed, Box::new(GreedyParser{}) as Box<dyn LZParse>),
1722 DeflateMode::Better => (Mode::Dynamic, Box::new(LazyParser{}) as Box<dyn LZParse>),
1723 DeflateMode::Best => (Mode::Dynamic, Box::new(OptimalParser::new()) as Box<dyn LZParse>),
1727 tokens: Vec::with_capacity(MAX_BLOCK_SIZE),
1728 srcbuf: [0; MAX_BLOCK_SIZE],
1735 /// Writes zlib stream header.
1736 pub fn write_zlib_header(&mut self, wr: &mut DeflateWriter) {
// Second header byte (FLG of RFC 1950), encoding the compression-level
// hint; the CMF byte write is not visible in this listing -- the tests
// below expect a 0x78 0x01 pair for stored mode.
1739 let level = match self.mode {
1741 Mode::Fixed => 0x5E,
1742 Mode::Dynamic => 0x9C,
1743 // 0xDA for the strongest one
// Remember to emit the Adler-32 footer when the stream is finished.
1746 self.zlib_mode = true;
// Emits the Adler-32 checksum (sum2 = high half, sum1 = low half) in
// big-endian byte order as required by RFC 1950.
1748 fn write_zlib_footer(&self, wr: &mut DeflateWriter) {
1750 wr.write((self.sum2 >> 8) as u16, 8);
1751 wr.write((self.sum2 & 0xFF) as u16, 8);
1752 wr.write((self.sum1 >> 8) as u16, 8);
1753 wr.write((self.sum1 & 0xFF) as u16, 8);
1755 /// Queues data for compression.
1757 /// The data might be not actually compressed until [`compress_end`] is called.
1759 /// [`compress_end`]: ./struct.Deflate.html#method.compress_end
1760 pub fn compress(&mut self, src: &[u8], wr: &mut DeflateWriter) {
// Fill the internal buffer, emitting a non-final block each time it reaches
// MAX_BLOCK_SIZE.  (The lines rebinding `src` to `tail` and advancing
// `self.ssize` are not visible in this listing.)
1762 while !src.is_empty() {
1763 let clen = src.len().min(MAX_BLOCK_SIZE - self.ssize);
1764 let (head, tail) = src.split_at(clen);
1766 self.srcbuf[self.ssize..][..clen].copy_from_slice(head);
1768 if self.ssize == MAX_BLOCK_SIZE {
1769 self.do_block(wr, false);
1773 /// Tells the encoder to finish data compression.
1775 /// Complete data will be output after this call.
1776 pub fn compress_end(&mut self, wr: &mut DeflateWriter) {
// Flush any buffered data as the final block...
1778 self.do_block(wr, true);
// ...or, on an empty buffer, emit a bare final block terminator: the
// static end-of-block symbol is 7 zero bits.  (The conditional lines
// selecting between these paths are not visible in this listing.)
1782 wr.write(0, 7); //static EOF sym
// Close the zlib wrapper with the Adler-32 footer when one was opened.
1785 self.write_zlib_footer(wr);
// Emits one deflate block for the buffered data.  The `match self.mode`
// arms selecting the three paths below are not visible in this listing.
1788 fn do_block(&mut self, wr: &mut DeflateWriter, final_block: bool) {
// NOTE(review): 65521 is the Adler-32 modulus (RFC 1950), not a CRC
// polynomial -- the name CRC_BASE is a misnomer.
1789 const CRC_BASE: u32 = 65521;
// Update the running Adler-32 state (sum1 = low word, sum2 = high word);
// one conditional subtraction per step suffices because each addition
// stays below 2 * CRC_BASE.
1790 for &b in self.srcbuf[..self.ssize].iter() {
1791 self.sum1 += u32::from(b);
1792 if self.sum1 >= CRC_BASE {
1793 self.sum1 -= CRC_BASE;
1795 self.sum2 += self.sum1;
1796 if self.sum2 >= CRC_BASE {
1797 self.sum2 -= CRC_BASE;
// Stored block: BFINAL bit, then LEN and its one's complement NLEN
// (RFC 1951 section 3.2.4), then the raw bytes.
1802 wr.write(final_block as u16, 1);
1805 wr.write(self.ssize as u16, 16);
1806 wr.write(!self.ssize as u16, 16);
1807 for &b in self.srcbuf[..self.ssize].iter() {
1808 wr.write(u16::from(b), 8);
// Fixed-Huffman block: tokenize the buffer and emit the tokens with the
// predefined code tables (CodeHuff/DistHuff constructed with `true`).
1812 wr.write(final_block as u16, 1);
1814 self.tokens.truncate(0);
1815 self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
1816 let mut codes = CodeHuff::new(true);
1817 codes.make_codes(&self.tokens);
1818 let mut dists = DistHuff::new(true);
1819 dists.make_codes(&self.tokens);
1820 wr.write_tokens(&self.tokens, &codes, &dists);
// Dynamic-Huffman block: build per-block tables, then write the 5-bit
// HLIT (num_codes - 257) and HDIST (num_codes - 1) header fields followed
// by the code descriptions and the token stream (RFC 1951 section 3.2.7).
1823 wr.write(final_block as u16, 1);
1825 self.tokens.truncate(0);
1826 self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
1827 let mut codes = CodeHuff::new(false);
1828 codes.make_codes(&self.tokens);
1829 let mut dists = DistHuff::new(false);
1830 dists.make_codes(&self.tokens);
1831 wr.write((codes.num_codes - 257) as u16, 5);
1832 wr.write((dists.num_codes - 1) as u16, 5);
1833 wr.write_codes(&codes, &dists);
1834 wr.write_tokens(&self.tokens, &codes, &dists);
// Decompresses a known deflated "Hello, world!" payload in one shot and
// checks the output byte-for-byte.
1846 fn test_inflate1() {
1847 const TEST_DATA: &[u8] = &[
1848 0xF3, 0x48, 0xCD, 0xC9, 0xC9, 0xD7, 0x51, 0x28,
1849 0xCF, 0x2F, 0xCA, 0x49, 0x51, 0x04, 0x00 ];
1850 const TEST_REF: &[u8] = b"Hello, world!";
1851 let mut dst_buf = [0u8; 13];
1852 let len = Inflate::uncompress(TEST_DATA, &mut dst_buf).unwrap();
1853 assert_eq!(len, 13);
1855 assert_eq!(dst_buf[i], TEST_REF[i]);
// Exercises chunked decompression: input fed 3 bytes at a time into a
// 7-byte output buffer, handling ShortData (need more input) and OutputFull
// (flush the chunk and repeat the same input) results.
1859 fn test_inflate2() {
1860 const TEST_DATA3: &[u8] = &[ 0x4B, 0x4C, 0x44, 0x80, 0x24, 0x54, 0x80, 0x2C, 0x06, 0x00 ];
1861 const TEST_REF3: &[u8] = b"aaaaaaaaaaaabbbbbbbbbbbbbbbaaaaabbbbbbb";
1862 let mut dst_buf = [0u8; 39];
1864 let mut inflate = Inflate::new();
1865 let mut output_chunk = [0u8; 7];
1866 let mut output_pos = 0;
1867 for input in TEST_DATA3.chunks(3) {
1868 let mut repeat = false;
1870 let ret = inflate.decompress_data(input, &mut output_chunk, repeat);
// Success: copy the decoded tail into the reference buffer.
1874 dst_buf[output_pos + i] = output_chunk[i];
// Current input chunk exhausted -- move on to the next one.
1879 Err(DecompressError::ShortData) => {
// Output chunk filled -- flush it and decode the same input again.
1882 Err(DecompressError::OutputFull) => {
1884 for i in 0..output_chunk.len() {
1885 dst_buf[output_pos + i] = output_chunk[i];
1887 output_pos += output_chunk.len();
1890 panic!("decompress error {:?}", ret.err().unwrap());
// Verify both the total length and the content against the reference.
1896 assert_eq!(output_pos, dst_buf.len());
1897 for i in 0..output_pos {
1898 assert_eq!(dst_buf[i], TEST_REF3[i]);
// Decodes a complete gzip (RFC 1952) file -- header, deflate payload and
// trailer -- via the gzip_decode entry point; only checks it succeeds.
1902 fn test_inflate3() {
1903 const TEST_DATA: &[u8] = &[
1904 0x1F, 0x8B, 0x08, 0x08, 0xF6, 0x7B, 0x90, 0x5E, 0x02, 0x03, 0x31, 0x2E, 0x74, 0x78, 0x74, 0x00,
1905 0xE5, 0x95, 0x4B, 0x4E, 0xC3, 0x30, 0x10, 0x40, 0xF7, 0x39, 0xC5, 0x1C, 0x00, 0x16, 0x70, 0x83,
1906 0x0A, 0xB5, 0x3B, 0xE8, 0x82, 0x5E, 0x60, 0x1A, 0x4F, 0xE2, 0x11, 0xFE, 0x44, 0x1E, 0xA7, 0x69,
1907 0x6E, 0xCF, 0x38, 0xDD, 0xB0, 0x40, 0xA2, 0x46, 0x2D, 0x20, 0x2A, 0xE5, 0xAB, 0xCC, 0xE7, 0xBD,
1908 0x49, 0xAC, 0x6C, 0x03, 0x64, 0x4B, 0xD0, 0x71, 0x92, 0x0C, 0x06, 0x67, 0x88, 0x1D, 0x3C, 0xD9,
1909 0xC4, 0x92, 0x3D, 0x4A, 0xF3, 0x3C, 0x43, 0x4E, 0x23, 0x81, 0x8B, 0x07, 0x82, 0x1E, 0xF5, 0x90,
1910 0x23, 0x78, 0x6A, 0x56, 0x30, 0x60, 0xCA, 0x89, 0x4D, 0x4F, 0xC0, 0x01, 0x10, 0x06, 0xC2, 0xA4,
1911 0xA1, 0x44, 0xCD, 0xF6, 0x54, 0x50, 0xA8, 0x8D, 0xC1, 0x9C, 0x5F, 0x71, 0x37, 0x45, 0xC8, 0x63,
1912 0xCA, 0x8E, 0xC0, 0xE8, 0x23, 0x69, 0x56, 0x9A, 0x8D, 0x5F, 0xB6, 0xC9, 0x96, 0x53, 0x4D, 0x17,
1913 0xAB, 0xB9, 0xB0, 0x49, 0x14, 0x5A, 0x0B, 0x96, 0x82, 0x7C, 0xB7, 0x6F, 0x17, 0x35, 0xC7, 0x9E,
1914 0xDF, 0x78, 0xA3, 0xF1, 0xD0, 0xA2, 0x73, 0x1C, 0x7A, 0xD8, 0x2B, 0xB3, 0x5C, 0x90, 0x85, 0xBB,
1915 0x2A, 0x14, 0x2E, 0xF7, 0xD1, 0x19, 0x48, 0x0A, 0x23, 0x57, 0x45, 0x13, 0x3E, 0xD6, 0xA0, 0xBD,
1916 0xF2, 0x11, 0x7A, 0x22, 0x21, 0xAD, 0xE5, 0x70, 0x56, 0xA0, 0x9F, 0xA5, 0xA5, 0x03, 0x85, 0x2A,
1917 0xDE, 0x92, 0x00, 0x32, 0x61, 0x10, 0xAD, 0x27, 0x13, 0x7B, 0x5F, 0x98, 0x7F, 0x59, 0x83, 0xB8,
1918 0xB7, 0x35, 0x16, 0xEB, 0x12, 0x0F, 0x1E, 0xD9, 0x14, 0x0B, 0xCF, 0xEE, 0x6D, 0x91, 0xF8, 0x93,
1919 0x6E, 0x81, 0x3F, 0x7F, 0x41, 0xA4, 0x22, 0x1F, 0xB7, 0xE6, 0x85, 0x83, 0x9A, 0xA2, 0x61, 0x12,
1920 0x0D, 0x0F, 0x6D, 0x01, 0xBD, 0xB0, 0xE8, 0x1D, 0xEC, 0xD1, 0xA0, 0xBF, 0x1F, 0x4E, 0xFB, 0x55,
1921 0xBD, 0x73, 0xDD, 0x87, 0xB9, 0x53, 0x23, 0x17, 0xD3, 0xE2, 0xE9, 0x08, 0x87, 0x42, 0xFF, 0xCF,
1922 0x26, 0x42, 0xAE, 0x76, 0xB5, 0xAE, 0x97, 0x0C, 0x18, 0x78, 0xA0, 0x24, 0xE5, 0x54, 0x0C, 0x6E,
1923 0x60, 0x52, 0x79, 0x22, 0x57, 0xF5, 0x87, 0x78, 0x78, 0x04, 0x93, 0x46, 0xEF, 0xCB, 0x98, 0x96,
1924 0x8B, 0x65, 0x00, 0xB7, 0x36, 0xBD, 0x77, 0xA8, 0xBD, 0x5A, 0xAA, 0x1A, 0x09, 0x00, 0x00
// Feed the data through the byte-reader plumbing the decoder expects.
1927 let mut mr = MemoryReader::new_read(TEST_DATA);
1928 let mut br = ByteReader::new(&mut mr);
1929 let _dst_buf = gzip_decode(&mut br, false).unwrap();
1931 // println!("{}", String::from_utf8_lossy(_dst_buf.as_slice()));
// Compresses "Hello, world!" in stored (NoCompr) mode and checks the exact
// zlib output bytes, including the trailing Adler-32 checksum.
1934 fn test_deflate_crc() {
1935 let output = Vec::with_capacity(20);
1936 let mut writer = DeflateWriter::new(output);
1937 let mut compr = Deflate::new(DeflateMode::NoCompr);
1938 compr.write_zlib_header(&mut writer);
1939 compr.compress(b"Hello, world!", &mut writer);
1940 compr.compress_end(&mut writer);
1941 let output = writer.end();
1942 assert_eq!(output.as_slice(), b"\x78\x01\x01\x0D\x00\xF2\xFFHello, world!\x20\x5E\x04\x8A");
// Round-trip helper shared by the per-mode tests below: compress a fixed
// text with the given mode, decompress the result and compare to the input.
1944 fn deflate_test(mode: DeflateMode) {
1946 b"The first day of Christmas,
1947 My true love sent to me
1948 A partridge in a pear tree.
1950 The second day of Christmas,
1951 My true love sent to me
1952 Two turtle doves, and
1953 A partridge in a pear tree.
1955 The third day of Christmas,
1956 My true love sent to me
1958 Two turtle doves, and
1959 A partridge in a pear tree.
1961 The fourth day of Christmas,
1962 My true love sent to me
1965 Two turtle doves, and
1966 A partridge in a pear tree.
1968 The fifth day of Christmas,
1969 My true love sent to me
1973 Two turtle doves, and
1974 A partridge in a pear tree.";
1975 let output = Vec::with_capacity(SRC.len() + 16);
1976 let mut writer = DeflateWriter::new(output);
1977 let mut compr = Deflate::new(mode);
1978 compr.write_zlib_header(&mut writer);
1979 compr.compress(SRC, &mut writer);
1980 compr.compress_end(&mut writer);
1981 let output = writer.end();
// Decompress into an exactly-sized buffer and demand a perfect round trip.
1982 let mut uncompr = vec![0u8; SRC.len()];
1983 Inflate::uncompress(&output, &mut uncompr).unwrap();
1984 assert_eq!(SRC, uncompr.as_slice());
// Round-trip tests for each compression mode (the #[test] attribute lines
// are not visible in this listing).
1987 fn test_deflate_fast() {
1988 deflate_test(DeflateMode::Fast);
1991 fn test_deflate_better() {
1992 deflate_test(DeflateMode::Better);
1995 fn test_deflate_best() {
1996 deflate_test(DeflateMode::Best);