]> git.nihav.org Git - nihav.git/blob - nihav-core/src/compr/deflate.rs
disable or remove unneeded debug messages
[nihav.git] / nihav-core / src / compr / deflate.rs
1 //! Deflate format (RFC 1951) support.
2 //!
3 //! This module provides functionality for decompressing raw deflated streams via [`Inflate`] and gzip files (RFC 1952) via [`gzip_decode`] and compressing raw or zlib streams via [`Deflate`].
4 //!
5 //! [`Deflate`]: ./struct.Deflate.html
6 //! [`Inflate`]: ./struct.Inflate.html
7 //! [`gzip_decode`]: ./fn.gzip_decode.html
8 //!
9 //! # Examples
10 //!
11 //! Decompressing full input buffer into sufficiently large output buffer:
12 //! ```
13 //! # use nihav_core::compr::DecompressError;
14 //! use nihav_core::compr::deflate::Inflate;
15 //!
16 //! # fn decompress(input: &[u8]) -> Result<(), DecompressError> {
17 //! # let mut output_buffer = [0u8; 16];
18 //! let output_length = Inflate::uncompress(input, &mut output_buffer)?;
19 //! # Ok(())
20 //! # }
21 //! ```
22 //!
23 //! Decompressing input chunks into portions of output:
24 //! ```
25 //! use nihav_core::compr::DecompressError;
26 //! use nihav_core::compr::deflate::Inflate;
27 //!
28 //! # fn decompress(input_data: &[u8]) -> Result<(), DecompressError> {
29 //! let mut inflate = Inflate::new();
30 //! let mut dst_buf: Vec<u8> = Vec::new();
31 //! let mut output_chunk = [0u8; 1024];
32 //! for src in input_data.chunks(512) {
33 //! let mut repeat = false;
34 //! loop {
35 //! let ret = inflate.decompress_data(src, &mut output_chunk, repeat);
36 //! match ret {
37 //! Ok(len) => { // we got a buffer decoded successfully to the end
38 //! dst_buf.extend_from_slice(&output_chunk[..len]);
39 //! break;
40 //! },
41 //! Err(DecompressError::ShortData) => { // this block of data was fully read
42 //! break;
43 //! },
44 //! Err(DecompressError::OutputFull) => {
45 //! // the output buffer is full, flush it and continue decoding the same block
46 //! repeat = true;
47 //! dst_buf.extend_from_slice(&output_chunk);
48 //! },
49 //! Err(err) => {
50 //! return Err(err);
51 //! },
52 //! }
53 //! }
54 //! }
55 //! # Ok(())
56 //! # }
57 //! ```
58 //!
59 //! Compressing input buffer into zlib stream:
60 //! ```
61 //! use nihav_core::compr::deflate::{Deflate, DeflateMode, DeflateWriter};
62 //!
63 //! # fn compress(input: &[u8]) {
64 //! let output = Vec::with_capacity(input.len() * 65540 / 65535 + 6);
65 //! let mut writer = DeflateWriter::new(output);
66 //! let mut compr = Deflate::new(DeflateMode::Fast);
67 //! compr.write_zlib_header(&mut writer);
68 //! compr.compress(input, &mut writer);
69 //! compr.compress_end(&mut writer);
70 //! let output = writer.end();
71 //! # }
72 //! ```
73
74 use crate::options::NAOptionDefinitionType;
75 use crate::io::byteio::*;
76 use crate::io::bitreader::*;
77 use crate::io::codebook::*;
78 use super::*;
79
// Size of the literal/length code table used for dynamic blocks; the fixed
// codebook uses NUM_LITERALS + 1 = 288 codes (RFC 1951 defines codes 0..=287).
const NUM_LITERALS: usize = 287;
// Size of the distance code table (RFC 1951 defines 30 used codes + 2 reserved).
const NUM_DISTS: usize = 32;
82
// Descriptor used to build the fixed literal/length Huffman codebook
// defined in RFC 1951 section 3.2.6.
struct FixedLenCodeReader {}

impl CodebookDescReader<u16> for FixedLenCodeReader {
    // Code length for each symbol of the fixed literal/length alphabet:
    // 0-143 -> 8 bits, 144-255 -> 9 bits, 256-279 -> 7 bits, 280-287 -> 8 bits.
    fn bits(&mut self, idx: usize) -> u8 {
        if idx < 144 { 8 }
        else if idx < 256 { 9 }
        else if idx < 280 { 7 }
        else { 8 }
    }
    // Canonical code for each symbol, bit-reversed because the deflate bit
    // reader is LSB-first (range bases are 0x30, 0x190, 0x000 and 0xC0).
    #[allow(clippy::identity_op)]
    fn code(&mut self, idx: usize) -> u32 {
        let base = idx as u32;
        let bits = self.bits(idx);
        if idx < 144 { reverse_bits(base + 0x30, bits) }
        else if idx < 256 { reverse_bits(base + 0x190 - 144, bits) }
        else if idx < 280 { reverse_bits(base + 0x000 - 256, bits) }
        else { reverse_bits(base + 0xC0 - 280, bits) }
    }
    // Symbols are simply the code indices.
    fn sym (&mut self, idx: usize) -> u16 { idx as u16 }
    // 288 codes in total.
    fn len(&mut self) -> usize { NUM_LITERALS + 1 }
}
104
// Saved bit-reader position, kept between calls so that decoding can be
// resumed across input chunks.
#[derive(Clone,Copy,Default)]
struct BitReaderState {
    pos: usize,     // byte position inside the current input slice
    bitbuf: u32,    // LSB-first bit cache
    bits: u8,       // number of valid bits currently in `bitbuf`
}

// LSB-first bit reader over the current input chunk, restorable from a
// previously saved `BitReaderState`.
struct CurrentSource<'a> {
    src: &'a [u8],
    br: BitReaderState,
}
116
impl<'a> CurrentSource<'a> {
    // Starts reading a fresh input chunk: the byte position is reset while
    // the bits already cached in `br` are carried over.
    fn new(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
        newsrc.br.pos = 0;
        newsrc.refill();
        newsrc
    }
    // Resumes reading the same chunk from a previously saved state
    // (byte position is kept as-is).
    fn reinit(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
        newsrc.refill();
        newsrc
    }
    // Tops up the 32-bit cache with whole input bytes, LSB first.
    fn refill(&mut self) {
        while (self.br.bits <= 24) && (self.br.pos < self.src.len()) {
            self.br.bitbuf |= u32::from(self.src[self.br.pos]) << self.br.bits;
            self.br.bits += 8;
            self.br.pos += 1;
        }
    }
    // Drops `nbits` consumed bits from the cache; the caller must ensure the
    // cache actually holds that many bits.
    fn skip_cache(&mut self, nbits: u8) {
        self.br.bitbuf >>= nbits;
        self.br.bits -= nbits;
    }
    // Reads up to 16 bits, refilling on demand; fails with `BitstreamEnd`
    // when the chunk runs out of data.
    fn read(&mut self, nbits: u8) -> BitReaderResult<u32> {
        if nbits == 0 { return Ok(0); }
        if nbits > 16 { return Err(BitReaderError::TooManyBitsRequested); }
        if self.br.bits < nbits {
            self.refill();
            if self.br.bits < nbits { return Err(BitReaderError::BitstreamEnd); }
        }
        let ret = self.br.bitbuf & ((1 << nbits) - 1);
        self.skip_cache(nbits);
        Ok(ret)
    }
    // Reads a single bit as a boolean.
    fn read_bool(&mut self) -> BitReaderResult<bool> {
        if self.br.bits == 0 {
            self.refill();
            if self.br.bits == 0 { return Err(BitReaderError::BitstreamEnd); }
        }
        let ret = (self.br.bitbuf & 1) != 0;
        self.skip_cache(1);
        Ok(ret)
    }
    // Peeks up to 16 bits without consuming them. Near the end of the chunk
    // the missing high bits read as zero; codebook lookups account for this
    // by checking `left()` before skipping.
    fn peek(&mut self, nbits: u8) -> u32 {
        if nbits == 0 || nbits > 16 { return 0; }
        if self.br.bits < nbits {
            self.refill();
        }
        self.br.bitbuf & ((1 << nbits) - 1)
    }
    // Discards `nbits` bits. Callers must have verified availability via
    // `left()` first, hence the `unreachable!()` on the short path.
    fn skip(&mut self, nbits: u32) -> BitReaderResult<()> {
        if u32::from(self.br.bits) >= nbits {
            self.skip_cache(nbits as u8);
        } else {
            unreachable!();
        }
        Ok(())
    }
    // Discards bits up to the next byte boundary (used for stored blocks).
    fn align(&mut self) {
        let b = self.br.bits & 7;
        if b != 0 {
            self.skip_cache(b);
        }
    }
    // Number of bits still available (unread bytes plus cached bits).
    fn left(&self) -> isize {
        ((self.src.len() as isize) - (self.br.pos as isize)) * 8 + (self.br.bits as isize)
    }
}
185
impl<'a, S: Copy> CodebookReader<S> for CurrentSource<'a> {
    // Decodes one codeword by walking the (possibly multi-level) codebook LUT.
    // If the input chunk ends in the middle of a codeword the reader state is
    // rolled back and `MemoryError` is returned, which callers treat as
    // "need more input" rather than as a fatal error.
    fn read_cb(&mut self, cb: &Codebook<S>) -> CodebookResult<S> {
        let mut esc = true;
        let mut idx = 0;
        let mut lut_bits = cb.lut_bits;
        let orig_br = self.br;
        while esc {
            let lut_idx = (self.peek(lut_bits) as usize) + (idx as usize);
            if cb.table[lut_idx] == TABLE_FILL_VALUE { return Err(CodebookError::InvalidCode); }
            let bits = cb.table[lut_idx] & 0x7F;
            esc = (cb.table[lut_idx] & 0x80) != 0;
            idx = (cb.table[lut_idx] >> 8) as usize;
            let skip_bits = if esc { u32::from(lut_bits) } else { bits };
            if (skip_bits as isize) > self.left() {
                // ran out of input mid-codeword: restore the saved state
                self.br = orig_br;
                self.refill();
                return Err(CodebookError::MemoryError);
            }
            self.skip(skip_bits as u32).unwrap();
            lut_bits = bits as u8;
        }
        Ok(cb.syms[idx])
    }
}
210
// State machine of the resumable decompressor. Each variant carries the
// intermediate values needed to continue after the input runs dry or the
// output buffer fills up. The "Static*" variants handle stored
// (uncompressed) blocks, BTYPE = 00.
enum InflateState {
    Start,                               // nothing parsed yet (optional zlib header may follow)
    BlockStart,                          // about to read the next block header (BFINAL bit)
    BlockMode,                           // BFINAL read, waiting for BTYPE
    StaticBlockLen,                      // stored block: reading LEN
    StaticBlockInvLen(u32),              // stored block: reading NLEN (one's complement of LEN)
    StaticBlockCopy(usize),              // stored block: copying the remaining bytes
    FixedBlock,                          // fixed-Huffman block: decoding next symbol
    FixedBlockLengthExt(usize, u8),      // fixed: reading extra match-length bits (base, bits)
    FixedBlockDist(usize),               // fixed: reading the distance code (match length)
    FixedBlockDistExt(usize, usize, u8), // fixed: reading extra distance bits (length, base, bits)
    FixedBlockCopy(usize, usize),        // fixed: performing the LZ77 copy (length, distance)
    FixedBlockLiteral(u8),               // fixed: literal pending because the output was full
    DynBlockHlit,                        // dynamic block: reading HLIT
    DynBlockHdist,                       // dynamic block: reading HDIST
    DynBlockHclen,                       // dynamic block: reading HCLEN
    DynLengths(usize),                   // dynamic: reading code-length code lengths (remaining)
    DynCodeLengths,                      // dynamic: decoding literal/distance code lengths
    DynCodeLengthsAdd(usize),            // dynamic: handling repeat code 16/17/18 (mode index)
    DynBlock,                            // dynamic block: decoding next symbol
    DynBlockLengthExt(usize, u8),        // dynamic: reading extra match-length bits (base, bits)
    DynBlockDist(usize),                 // dynamic: reading the distance code (match length)
    DynBlockDistExt(usize, usize, u8),   // dynamic: reading extra distance bits (length, base, bits)
    DynCopy(usize, usize),               // dynamic: performing the LZ77 copy (length, distance)
    DynBlockLiteral(u8),                 // dynamic: literal pending because the output was full
    End,                                 // stream fully decoded
}
238
/// The decompressor for deflated streams (RFC 1951).
pub struct Inflate {
    br: BitReaderState,          // saved bit-reader state between calls
    fix_len_cb: Codebook<u16>,   // fixed literal/length codebook (RFC 1951 3.2.6)

    buf: [u8; 65536],            // LZ77 history ring buffer (power-of-two size)
    bpos: usize,                 // current write position inside `buf` (wraps)
    output_idx: usize,           // bytes written to the caller's buffer in this run
    full_pos: usize,             // total number of bytes decoded so far

    state: InflateState,         // current position in the state machine
    final_block: bool,           // BFINAL flag of the block being decoded
    hlit: usize,                 // number of literal/length codes in a dynamic block
    hdist: usize,                // number of distance codes in a dynamic block
    dyn_len_cb: Option<Codebook<u32>>,  // code-length codebook for dynamic blocks
    dyn_lit_cb: Option<Codebook<u32>>,  // literal/length codebook for dynamic blocks
    dyn_dist_cb: Option<Codebook<u32>>, // distance codebook for dynamic blocks
    len_lengths: [u8; 19],       // lengths for the code-length codebook
    all_lengths: [u8; NUM_LITERALS + NUM_DISTS], // combined literal + distance lengths
    cur_len_idx: usize,          // progress index while reading code lengths
}
260
// Number of extra bits for length codes 257..=285 (RFC 1951 section 3.2.5).
const LENGTH_ADD_BITS: [u8; 29] = [
     0,  0,  0,  0,  0,  0,  0,  0,  1,  1,
     1,  1,  2,  2,  2,  2,  3,  3,  3,  3,
     4,  4,  4,  4,  5,  5,  5,  5,  0
];
// Base match length for length codes 257..=285.
const LENGTH_BASE: [u16; 29] = [
      3,   4,   5,   6,   7,   8,   9,  10,  11,  13,
     15,  17,  19,  23,  27,  31,  35,  43,  51,  59,
     67,  83,  99, 115, 131, 163, 195, 227, 258
];
// Number of extra bits for each distance code 0..=29.
const DIST_ADD_BITS: [u8; 30] = [
     0,  0,  0,  0,  1,  1,  2,  2,  3,  3,
     4,  4,  5,  5,  6,  6,  7,  7,  8,  8,
     9,  9, 10, 10, 11, 11, 12, 12, 13, 13
];
// Base distance for each distance code 0..=29.
const DIST_BASE: [u16; 30] = [
        1,    2,    3,    4,    5,    7,    9,   13,   17,   25,
       33,   49,   65,   97,  129,  193,  257,  385,  513,  769,
     1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
];
// Transmission order of the code lengths for the code-length alphabet
// (RFC 1951 section 3.2.7).
const LEN_RECODE: [usize; 19] = [
    16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
];
// Extra bits carried by the repeat codes 16, 17 and 18 of the code-length alphabet.
const REPEAT_BITS: [u8; 3] = [ 2, 3, 7 ];
// Base repeat counts for the repeat codes 16, 17 and 18.
const REPEAT_BASE: [u8; 3] = [ 3, 3, 11 ];
286
// Reads a fixed number of bits from the current source; when the chunk is
// exhausted the bit-reader state is saved into `$self.br` and the caller
// returns `ShortData` so that decoding can resume with more input later.
macro_rules! read_bits {
    ($self: expr, $csrc: expr, $bits: expr) => ({
        if $csrc.left() < $bits as isize {
            $self.br = $csrc.br;
            return Err(DecompressError::ShortData);
        }
        $csrc.read($bits).unwrap()
    })
}
296
// Reads one codeword from the current source. `MemoryError` from the
// codebook reader means the chunk ended mid-codeword: the state is saved
// and `ShortData` is returned so decoding can resume later. Any other
// codebook error is a genuine bitstream error and terminates decoding.
macro_rules! read_cb {
    ($self: expr, $csrc: expr, $cb: expr) => ({
        let ret = $csrc.read_cb($cb);
        if let Err(CodebookError::MemoryError) = ret {
            $self.br = $csrc.br;
            return Err(DecompressError::ShortData);
        }
        match ret {
            Ok(val) => val,
            Err(_) => {
                $self.state = InflateState::End;
                return Err(DecompressError::InvalidData);
            },
        }
    })
}
313
impl Inflate {
    /// Creates a new instance of `Inflate` struct.
    pub fn new() -> Self {
        let mut cr = FixedLenCodeReader {};
        let fix_len_cb = Codebook::new(&mut cr, CodebookMode::LSB).unwrap();
        Self {
            br: BitReaderState::default(),
            fix_len_cb,

            buf: [0; 65536],
            bpos: 0,
            output_idx: 0,
            full_pos: 0,

            state: InflateState::Start,
            final_block: false,
            dyn_len_cb: None,
            dyn_lit_cb: None,
            dyn_dist_cb: None,
            hlit: 0,
            hdist: 0,
            len_lengths: [0; 19],
            all_lengths: [0; NUM_LITERALS + NUM_DISTS],
            cur_len_idx: 0,
        }
    }
    // Appends one decoded byte to the history ring buffer.
    fn put_literal(&mut self, val: u8) {
        self.buf[self.bpos] = val;
        // `buf.len()` is a power of two, so masking implements the wrap-around
        self.bpos = (self.bpos + 1) & (self.buf.len() - 1);
        self.full_pos += 1;
    }
    // Copies `len` bytes starting `offset` bytes back in the history buffer,
    // both back into the history and into `dst`. Overlapping copies are valid
    // in deflate (offset < len repeats data), hence the byte-by-byte loop.
    fn lz_copy(&mut self, offset: usize, len: usize, dst: &mut [u8]) -> DecompressResult<()> {
        let mask = self.buf.len() - 1;
        if offset > self.full_pos {
            // reference to data before the start of the stream
            return Err(DecompressError::InvalidData);
        }
        let cstart = (self.bpos.wrapping_sub(offset)) & mask;
        for i in 0..len {
            self.buf[(self.bpos + i) & mask] = self.buf[(cstart + i) & mask];
            dst[i] = self.buf[(cstart + i) & mask];
        }
        self.bpos = (self.bpos + len) & mask;
        self.full_pos += len;
        Ok(())
    }
    /// Sets custom history for decoding an update for already decoded data.
    pub fn set_dict(&mut self, dict: &[u8]) {
        // keep only the trailing part that fits the 64 KiB window — older
        // data can never be referenced by a deflate distance anyway
        let len = dict.len().min(self.buf.len());
        let start = dict.len() - len;
        self.buf[..len].copy_from_slice(&dict[start..]);
        self.bpos = len;
        self.full_pos = len;
    }
    /// Reports whether decoder has finished decoding the input.
    pub fn is_finished(&self) -> bool {
        matches!(self.state, InflateState::End)
    }
    /// Reports the current amount of bytes output into the destination buffer after the last run.
    pub fn get_current_output_size(&self) -> usize { self.output_idx }
    /// Reports the total amount of bytes decoded so far.
    // NOTE(review): this returns the ring-buffer write position, which wraps
    // around at 64 KiB; `full_pos` holds the untruncated total — confirm
    // which of the two callers actually expect here.
    pub fn get_total_output_size(&self) -> usize { self.bpos }
    /// Tries to decompress input data and write it to the output buffer.
    ///
    /// Since the decompressor can work with arbitrary input and output chunks its return value may have several meanings:
    /// * `Ok(len)` means the stream has been fully decoded and the number of bytes output into the destination buffer is returned.
    /// * [`DecompressError::ShortData`] means the input stream has been fully read but more data is needed.
    /// * [`DecompressError::OutputFull`] means the output buffer is full and should be flushed. Then decoding should continue on the same input block with `continue_block` parameter set to `true`.
    ///
    /// [`DecompressError::ShortData`]: ../enum.DecompressError.html#variant.ShortData
    /// [`DecompressError::OutputFull`]: ../enum.DecompressError.html#variant.OutputFull
    pub fn decompress_data(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool) -> DecompressResult<usize> {
        self.decompress_data_internal(src, dst, continue_block, false)
    }
    /// Tries to decompress whole input chunk to the output buffer.
    pub fn decompress_block(&mut self, src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
        self.decompress_data_internal(src, dst, false, true)
    }
    #[allow(clippy::comparison_chain)]
    fn decompress_data_internal(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool, do_one_block: bool) -> DecompressResult<usize> {
        if src.is_empty() || dst.is_empty() {
            return Err(DecompressError::InvalidArgument);
        }
        let mut csrc = if !continue_block {
            CurrentSource::new(src, self.br)
        } else {
            // resuming after OutputFull: restart output accounting only,
            // keeping the saved bit position inside the same chunk
            self.output_idx = 0;
            CurrentSource::reinit(src, self.br)
        };
        if do_one_block {
            self.output_idx = 0;
        }
        // check for zlib stream header (RFC 1950): CM = 8 (deflate),
        // CINFO <= 7 and a valid header checksum; skip it transparently
        if let (&InflateState::Start, true) = (&self.state, src.len() > 2) {
            let cm = src[0] & 0xF;
            let cinfo = src[0] >> 4;
            let hdr = (u16::from(src[0]) << 8) | u16::from(src[1]);
            if cm == 8 && cinfo <= 7 && (hdr % 31) == 0 {
                csrc.skip(16).unwrap();
            }
        }
        'main: loop {
            match self.state {
                InflateState::Start | InflateState::BlockStart => {
                    if csrc.left() == 0 {
                        if do_one_block {
                            return Ok(self.output_idx);
                        }
                        self.br = csrc.br;
                        return Err(DecompressError::ShortData);
                    }
                    // BFINAL bit of the block header
                    self.final_block = csrc.read_bool().unwrap();
                    self.state = InflateState::BlockMode;
                },
                InflateState::BlockMode => {
                    // BTYPE: 0 = stored, 1 = fixed Huffman, 2 = dynamic Huffman
                    let bmode = read_bits!(self, csrc, 2);
                    match bmode {
                        0 => {
                            // stored blocks start at a byte boundary
                            csrc.align();
                            self.state = InflateState::StaticBlockLen;
                        },
                        1 => { self.state = InflateState::FixedBlock; },
                        2 => { self.state = InflateState::DynBlockHlit; },
                        _ => {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidHeader);
                        },
                    };
                },
                InflateState::StaticBlockLen => {
                    let len = read_bits!(self, csrc, 16);
                    self.state = InflateState::StaticBlockInvLen(len);
                },
                InflateState::StaticBlockInvLen(len) => {
                    // NLEN must be the one's complement of LEN
                    let inv_len = read_bits!(self, csrc, 16);
                    if (len ^ inv_len) != 0xFFFF {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    self.state = InflateState::StaticBlockCopy(len as usize);
                },
                InflateState::StaticBlockCopy(len) => {
                    // copy the stored bytes, remembering how many remain
                    // if the input chunk runs out
                    for i in 0..len {
                        if csrc.left() < 8 {
                            self.br = csrc.br;
                            self.state = InflateState::StaticBlockCopy(len - i);
                            return Err(DecompressError::ShortData);
                        }
                        let val = csrc.read(8).unwrap() as u8;
                        self.put_literal(val);
                    }
                    self.state = InflateState::BlockStart;
                }
                InflateState::FixedBlock => {
                    let val = read_cb!(self, csrc, &self.fix_len_cb);
                    if val < 256 {
                        // literal byte
                        if self.output_idx >= dst.len() {
                            self.br = csrc.br;
                            self.state = InflateState::FixedBlockLiteral(val as u8);
                            return Err(DecompressError::OutputFull);
                        }
                        self.put_literal(val as u8);
                        dst[self.output_idx] = val as u8;
                        self.output_idx += 1;
                    } else if val == 256 {
                        // end-of-block symbol
                        if self.final_block {
                            self.state = InflateState::End;
                            return Ok(self.output_idx);
                        } else {
                            self.state = InflateState::BlockStart;
                        }
                    } else {
                        // match length code
                        let len_idx = (val - 257) as usize;
                        if len_idx >= LENGTH_BASE.len() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        let len_bits = LENGTH_ADD_BITS[len_idx];
                        let add_base = LENGTH_BASE[len_idx] as usize;
                        if len_bits > 0 {
                            self.state = InflateState::FixedBlockLengthExt(add_base, len_bits);
                        } else {
                            self.state = InflateState::FixedBlockDist(add_base);
                        }
                    }
                },
                InflateState::FixedBlockLiteral(sym) => {
                    // retry a literal postponed by a full output buffer
                    if self.output_idx >= dst.len() {
                        self.br = csrc.br;
                        return Err(DecompressError::OutputFull);
                    }
                    self.put_literal(sym);
                    dst[self.output_idx] = sym;
                    self.output_idx += 1;
                    self.state = InflateState::FixedBlock;
                },
                InflateState::FixedBlockLengthExt(base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::FixedBlockDist(base + add);
                },
                InflateState::FixedBlockDist(length) => {
                    // fixed distance codes are 5 bits, stored MSB-first
                    let dist_idx = reverse_bits(read_bits!(self, csrc, 5), 5) as usize;
                    if dist_idx >= DIST_BASE.len() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    }
                    let dist_bits = DIST_ADD_BITS[dist_idx];
                    let dist_base = DIST_BASE[dist_idx] as usize;
                    if dist_bits == 0 {
                        self.state = InflateState::FixedBlockCopy(length, dist_base);
                    } else {
                        self.state = InflateState::FixedBlockDistExt(length, dist_base, dist_bits);
                    }
                },
                InflateState::FixedBlockDistExt(length, base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::FixedBlockCopy(length, base + add);
                },
                InflateState::FixedBlockCopy(length, dist) => {
                    if self.output_idx + length > dst.len() {
                        // copy as much as fits and remember the remainder
                        let copy_size = dst.len() - self.output_idx;
                        let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
                        if ret.is_err() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        self.output_idx += copy_size;
                        self.br = csrc.br;
                        self.state = InflateState::FixedBlockCopy(length - copy_size, dist);
                        return Err(DecompressError::OutputFull);
                    }
                    let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
                    if ret.is_err() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    }
                    self.output_idx += length;
                    self.state = InflateState::FixedBlock;
                }
                InflateState::DynBlockHlit => {
                    // number of literal/length codes (257..=286 are valid)
                    self.hlit = (read_bits!(self, csrc, 5) as usize) + 257;
                    if self.hlit >= 287 {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    self.state = InflateState::DynBlockHdist;
                }
                InflateState::DynBlockHdist => {
                    // number of distance codes (1..=32)
                    self.hdist = (read_bits!(self, csrc, 5) as usize) + 1;
                    self.state = InflateState::DynBlockHclen;
                },
                InflateState::DynBlockHclen => {
                    // number of code-length code lengths to read (4..=19)
                    let hclen = (read_bits!(self, csrc, 4) as usize) + 4;
                    self.cur_len_idx = 0;
                    self.len_lengths = [0; 19];
                    self.all_lengths = [0; NUM_LITERALS + NUM_DISTS];
                    self.state = InflateState::DynLengths(hclen);
                },
                InflateState::DynLengths(len) => {
                    // read 3-bit lengths in the LEN_RECODE transmission order
                    for i in 0..len {
                        if csrc.left() < 3 {
                            self.br = csrc.br;
                            self.state = InflateState::DynLengths(len - i);
                            return Err(DecompressError::ShortData);
                        }
                        self.len_lengths[LEN_RECODE[self.cur_len_idx]] = csrc.read(3).unwrap() as u8;
                        self.cur_len_idx += 1;
                    }
                    let mut len_codes = [ShortCodebookDesc { code: 0, bits: 0 }; 19];
                    lengths_to_codes(&self.len_lengths, &mut len_codes)?;
                    let mut cr = ShortCodebookDescReader::new(len_codes.to_vec());
                    let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                    if ret.is_err() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    self.dyn_len_cb = Some(ret.unwrap());
                    self.cur_len_idx = 0;
                    self.state = InflateState::DynCodeLengths;
                },
                InflateState::DynCodeLengths => {
                    // decode hlit + hdist code lengths using the code-length codebook
                    if let Some(ref len_cb) = self.dyn_len_cb {
                        while self.cur_len_idx < self.hlit + self.hdist {
                            let ret = csrc.read_cb(len_cb);
                            let val = match ret {
                                Ok(val) => val,
                                Err(CodebookError::MemoryError) => {
                                    // out of input mid-codeword: save state and resume later
                                    self.br = csrc.br;
                                    return Err(DecompressError::ShortData);
                                },
                                Err(_) => {
                                    self.state = InflateState::End;
                                    return Err(DecompressError::InvalidHeader);
                                },
                            };
                            if val < 16 {
                                self.all_lengths[self.cur_len_idx] = val as u8;
                                self.cur_len_idx += 1;
                            } else {
                                // repeat codes 16/17/18 are handled in a separate state
                                let idx = (val as usize) - 16;
                                if idx > 2 {
                                    self.state = InflateState::End;
                                    return Err(DecompressError::InvalidHeader);
                                }
                                self.state = InflateState::DynCodeLengthsAdd(idx);
                                continue 'main;
                            }
                        }
                        // all lengths read: build the literal and distance codebooks
                        let (lit_lengths, dist_lengths) = self.all_lengths.split_at(self.hlit);

                        let mut lit_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_LITERALS];
                        lengths_to_codes(lit_lengths, &mut lit_codes)?;
                        let mut cr = ShortCodebookDescReader::new(lit_codes.to_vec());
                        let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        if ret.is_err() { return Err(DecompressError::InvalidHeader); }
                        self.dyn_lit_cb = Some(ret.unwrap());

                        let mut dist_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_DISTS];
                        lengths_to_codes(&dist_lengths[..self.hdist], &mut dist_codes)?;
                        let mut cr = ShortCodebookDescReader::new(dist_codes.to_vec());
                        let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        if ret.is_err() { return Err(DecompressError::InvalidHeader); }
                        self.dyn_dist_cb = Some(ret.unwrap());

                        self.state = InflateState::DynBlock;
                    } else {
                        unreachable!();
                    }
                },
                InflateState::DynCodeLengthsAdd(mode) => {
                    // mode 0: repeat previous length (code 16),
                    // modes 1/2: runs of zero lengths (codes 17/18)
                    let base = REPEAT_BASE[mode] as usize;
                    let bits = REPEAT_BITS[mode];
                    let len = base + read_bits!(self, csrc, bits) as usize;
                    if self.cur_len_idx + len > self.hlit + self.hdist {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    let rpt = if mode == 0 {
                        // code 16 requires a previously decoded length
                        if self.cur_len_idx == 0 {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidHeader);
                        }
                        self.all_lengths[self.cur_len_idx - 1]
                    } else {
                        0
                    };
                    for _ in 0..len {
                        self.all_lengths[self.cur_len_idx] = rpt;
                        self.cur_len_idx += 1;
                    }
                    self.state = InflateState::DynCodeLengths;
                },
                InflateState::DynBlock => {
                    // same structure as FixedBlock but with the dynamic codebooks
                    if let Some(ref lit_cb) = self.dyn_lit_cb {
                        let val = read_cb!(self, csrc, lit_cb);
                        if val < 256 {
                            if self.output_idx >= dst.len() {
                                self.br = csrc.br;
                                self.state = InflateState::DynBlockLiteral(val as u8);
                                return Err(DecompressError::OutputFull);
                            }
                            self.put_literal(val as u8);
                            dst[self.output_idx] = val as u8;
                            self.output_idx += 1;
                        } else if val == 256 {
                            if self.final_block {
                                self.state = InflateState::End;
                                return Ok(self.output_idx);
                            } else {
                                self.state = InflateState::BlockStart;
                            }
                        } else {
                            let len_idx = (val - 257) as usize;
                            if len_idx >= LENGTH_BASE.len() {
                                self.state = InflateState::End;
                                return Err(DecompressError::InvalidData);
                            }
                            let len_bits = LENGTH_ADD_BITS[len_idx];
                            let add_base = LENGTH_BASE[len_idx] as usize;
                            if len_bits > 0 {
                                self.state = InflateState::DynBlockLengthExt(add_base, len_bits);
                            } else {
                                self.state = InflateState::DynBlockDist(add_base);
                            }
                        }
                    } else {
                        unreachable!();
                    }
                },
                InflateState::DynBlockLiteral(sym) => {
                    // retry a literal postponed by a full output buffer
                    if self.output_idx >= dst.len() {
                        self.br = csrc.br;
                        return Err(DecompressError::OutputFull);
                    }
                    self.put_literal(sym);
                    dst[self.output_idx] = sym;
                    self.output_idx += 1;
                    self.state = InflateState::DynBlock;
                },
                InflateState::DynBlockLengthExt(base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::DynBlockDist(base + add);
                },
                InflateState::DynBlockDist(length) => {
                    if let Some(ref dist_cb) = self.dyn_dist_cb {
                        let dist_idx = read_cb!(self, csrc, dist_cb) as usize;
                        if dist_idx >= DIST_BASE.len() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        let dist_bits = DIST_ADD_BITS[dist_idx];
                        let dist_base = DIST_BASE[dist_idx] as usize;
                        if dist_bits == 0 {
                            self.state = InflateState::DynCopy(length, dist_base);
                        } else {
                            self.state = InflateState::DynBlockDistExt(length, dist_base, dist_bits);
                        }
                    } else {
                        unreachable!();
                    }
                },
                InflateState::DynBlockDistExt(length, base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::DynCopy(length, base + add);
                },
                InflateState::DynCopy(length, dist) => {
                    if self.output_idx + length > dst.len() {
                        // copy as much as fits and remember the remainder
                        let copy_size = dst.len() - self.output_idx;
                        let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
                        if ret.is_err() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        self.output_idx += copy_size;
                        self.br = csrc.br;
                        self.state = InflateState::DynCopy(length - copy_size, dist);
                        return Err(DecompressError::OutputFull);
                    }
                    let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
                    if ret.is_err() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    }
                    self.output_idx += length;
                    self.state = InflateState::DynBlock;
                }
                InflateState::End => {
                    return Ok(0);
                },
            }
        }
    }
    /// Resets decoder state.
    pub fn reset(&mut self) {
        self.bpos = 0;
        self.output_idx = 0;
        self.full_pos = 0;
        self.state = InflateState::Start;
    }

    /// Decompresses input data into output returning the uncompressed data length.
    pub fn uncompress(src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
        let mut inflate = Self::new();
        inflate.decompress_data(src, dst, false)
    }
}
779
780 impl Default for Inflate {
781 fn default() -> Self {
782 Self::new()
783 }
784 }
785
// Assigns canonical Huffman codes (RFC 1951 section 3.2.2) to the given code
// lengths. The generated codes are bit-reversed because the deflate bit
// reader is LSB-first; entries with zero length get no code. Fails with
// `InvalidHeader` if any length exceeds 31 bits.
fn lengths_to_codes(lens: &[u8], codes: &mut [ShortCodebookDesc]) -> DecompressResult<()> {
    let mut bits = [0u32; 32];  // histogram of code lengths
    let mut pfx = [0u32; 33];   // next code to assign for each length
    for len in lens.iter() {
        let len = *len as usize;
        if len >= bits.len() {
            return Err(DecompressError::InvalidHeader);
        }
        bits[len] += 1;
    }
    bits[0] = 0;
    // compute the first canonical code of each code length
    let mut code = 0;
    for i in 0..bits.len() {
        code = (code + bits[i]) << 1;
        pfx[i + 1] = code;
    }

    for (len, codes) in lens.iter().zip(codes.iter_mut()) {
        let len = *len as usize;
        if len != 0 {
            let bits = len as u8;
            *codes = ShortCodebookDesc { code: reverse_bits(pfx[len], bits), bits };
            pfx[len] += 1;
        } else {
            *codes = ShortCodebookDesc { code: 0, bits: 0 };
        }
    }

    Ok(())
}
816
// Table-driven CRC-32 (IEEE polynomial, reflected) as used by gzip trailers.
struct GzipCRC32 {
    tab: [u32; 256],
    crc: u32,
}

impl GzipCRC32 {
    // Builds the 256-entry lookup table for the reflected polynomial 0xEDB88320.
    #[allow(clippy::unreadable_literal)]
    fn new() -> Self {
        let mut tab = [0u32; 256];
        for (i, entry) in tab.iter_mut().enumerate() {
            let mut val = i as u32;
            for _ in 0..8 {
                val = if (val & 1) != 0 {
                    0xEDB88320 ^ (val >> 1)
                } else {
                    val >> 1
                };
            }
            *entry = val;
        }
        Self { tab, crc: 0 }
    }
    // Folds `src` into the running checksum; may be called repeatedly.
    fn update_crc(&mut self, src: &[u8]) {
        let mut state = !self.crc;
        for &byte in src.iter() {
            let tab_idx = ((state as u8) ^ byte) as usize;
            state = self.tab[tab_idx] ^ (state >> 8);
        }
        self.crc = !state;
    }
}
847
848 ///! Decodes input data in gzip file format (RFC 1952) returning a vector containing decoded data.
849 pub fn gzip_decode(br: &mut ByteReader, skip_crc: bool) -> DecompressResult<Vec<u8>> {
850 const FLAG_HCRC: u8 = 0x02;
851 const FLAG_EXTRA: u8 = 0x04;
852 const FLAG_NAME: u8 = 0x08;
853 const FLAG_COMMENT: u8 = 0x10;
854
855 let id1 = br.read_byte()?;
856 let id2 = br.read_byte()?;
857 let cm = br.read_byte()?;
858 let flg = br.read_byte()?;
859 let _mtime = br.read_u32le()?;
860 let _xfl = br.read_byte()?;
861 let _os = br.read_byte()?;
862 if id1 != 0x1F || id2 != 0x8B || cm != 8 {
863 return Err(DecompressError::InvalidHeader);
864 }
865
866 if (flg & FLAG_EXTRA) != 0 {
867 let xlen = br.read_u16le()? as usize;
868 br.read_skip(xlen)?;
869 }
870 if (flg & FLAG_NAME) != 0 {
871 loop {
872 let b = br.read_byte()?;
873 if b == 0 {
874 break;
875 }
876 }
877 }
878 if (flg & FLAG_COMMENT) != 0 {
879 loop {
880 let b = br.read_byte()?;
881 if b == 0 {
882 break;
883 }
884 }
885 }
886 let _hcrc = if (flg & FLAG_HCRC) != 0 {
887 br.read_u16le()?
888 } else {
889 0
890 };
891 if (flg & 0xE0) != 0 {
892 return Err(DecompressError::Unsupported);
893 }
894
895 let mut output: Vec<u8> = Vec::new();
896 let mut tail = [0u8; 8];
897 let mut inblk = [0u8; 1024];
898 let mut oblk = [0u8; 4096];
899 let mut inflate = Inflate::new();
900 let mut checker = GzipCRC32::new();
901
902 loop {
903 let ret = br.read_buf_some(&mut inblk);
904 if let Err(ByteIOError::EOF) = ret {
905 break;
906 }
907 let inlen = match ret {
908 Ok(val) => val,
909 Err(_) => return Err(DecompressError::IOError),
910 };
911 let mut repeat = false;
912 loop {
913 let ret = inflate.decompress_data(&inblk[..inlen], &mut oblk, repeat);
914 match ret {
915 Ok(outlen) => {
916 checker.update_crc(&oblk[..outlen]);
917 output.extend_from_slice(&oblk[..outlen]);
918 break;
919 },
920 Err(DecompressError::ShortData) => {
921 break;
922 },
923 Err(DecompressError::OutputFull) => {
924 repeat = true;
925 checker.update_crc(&oblk);
926 output.extend_from_slice(&oblk);
927 },
928 Err(err) => {
929 return Err(err);
930 },
931 }
932 }
933 // Save last 8 bytes for CRC and size.
934 if inlen >= 8 {
935 tail.copy_from_slice(&inblk[inlen - 8..][..8]);
936 } else {
937 let shift_len = 8 - inlen;
938 for i in 0..shift_len {
939 tail[i] = tail[i + inlen];
940 }
941 for i in shift_len..8 {
942 tail[i] = inblk[i - shift_len];
943 }
944 }
945 }
946 if !skip_crc {
947 if !inflate.is_finished() { println!("???"); }
948 let crc = read_u32le(&tail[0..4])?;
949 let size = read_u32le(&tail[4..8])?;
950 if size != (output.len() as u32) {
951 return Err(DecompressError::CRCError);
952 }
953 if crc != checker.crc {
954 return Err(DecompressError::CRCError);
955 }
956 }
957
958 Ok(output)
959 }
960
// A single element of the compressed stream: either a literal byte or an
// LZ77 match, pre-translated into deflate symbol numbers.
#[derive(Clone,Copy,Default)]
struct Token {
    sym: u16,       // literal/length symbol (0-255 literal, 256 EOB, 257+ length)
    distsym: u8,    // distance symbol (valid for matches only)
    len: u16,       // raw match length
    dist: u16,      // raw match distance
}

// Token marking the end of a compressed block (symbol 256).
const TOKEN_EOB: Token = Token { sym: 256, distsym: 0, len: 0, dist: 0 };

impl Token {
    // Wraps a plain byte into a literal token.
    fn from_literal(sym: u8) -> Self {
        Self {
            sym: u16::from(sym),
            distsym: 0,
            dist: 0,
            len: 0,
        }
    }
    // Builds a match token, mapping length and distance onto the deflate
    // symbol numbering of RFC 1951 section 3.2.5.
    fn from_match(dist: u16, len: u16) -> Self {
        let sym = if (3..=10).contains(&len) {
            257 + len - 3
        } else if (11..=18).contains(&len) {
            265 + (len - 11) / 2
        } else if (19..=34).contains(&len) {
            269 + (len - 19) / 4
        } else if (35..=66).contains(&len) {
            273 + (len - 35) / 8
        } else if (67..=130).contains(&len) {
            277 + (len - 67) / 16
        } else if (131..=257).contains(&len) {
            281 + (len - 131) / 32
        } else {
            285
        };
        let distsym = if dist <= 4 {
            // distance symbols 0-3 map directly to distances 1-4
            (dist - 1) as u8
        } else {
            // two symbols per power-of-two distance range; the bit below the
            // top one selects the upper or lower half of the range
            let top = 16 - (dist - 1).leading_zeros();
            let upper_half = ((dist - 1) & (1 << (top - 2))) != 0;
            (top as u8) * 2 - 2 + if upper_half { 1 } else { 0 }
        };
        Self {
            sym, distsym, len, dist
        }
    }
}
1001
// Emits the RLE tokens (codes 16/17/18 of RFC 1951 section 3.2.7) for a run
// of `lcount` extra repetitions of the code length `last`, updating the
// symbol statistics alongside. Zero lengths use codes 18 (11-138 repeats)
// and 17 (3-10 repeats); non-zero lengths use code 16 (repeat previous
// length 3-6 times); leftovers are emitted as plain length symbols.
fn flush_len_run(last: u8, mut lcount: usize, stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
    if last == 0 {
        while lcount > 10 {
            let run = lcount.min(138);
            stats[18] += 1;
            toks.push((18, (run - 11) as u8));
            lcount -= run;
        }
        if lcount >= 3 {
            stats[17] += 1;
            toks.push((17, (lcount - 3) as u8));
            lcount = 0;
        }
        for _ in 0..lcount {
            stats[0] += 1;
            toks.push((0, 0));
        }
    } else {
        while lcount >= 3 {
            let run = lcount.min(6);
            stats[16] += 1;
            toks.push((16, (run - 3) as u8));
            lcount -= run;
        }
        for _ in 0..lcount {
            stats[last as usize] += 1;
            toks.push((last, 0));
        }
    }
}

// Run-length encodes a sequence of code lengths into `(symbol, extra)` token
// pairs while accumulating symbol statistics, as needed for transmitting the
// dynamic Huffman tables (RFC 1951 section 3.2.7).
//
// Fixes over the previous version: the duplicated flush logic (loop body vs
// trailing run) is factored into `flush_len_run`, and the run counter is
// `usize` instead of an inferred `u8` — a run of more than 255 identical
// lengths (possible for large incomplete codes) no longer overflows.
fn add_codes(lens: &[u8], stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
    let mut last = 42; // impossible code length, so the first element never matches
    let mut lcount = 0usize;

    for &len in lens.iter() {
        if len == last {
            lcount += 1;
        } else {
            // flush the repetitions of the previous length, then emit the
            // new length itself and start counting its repetitions
            flush_len_run(last, lcount, stats, toks);
            stats[len as usize] += 1;
            toks.push((len, 0));
            last = len;
            lcount = 0;
        }
    }
    // flush the final run
    flush_len_run(last, lcount, stats, toks);
}
1075
/// Deflate stream writer.
pub struct DeflateWriter {
    dst: Vec<u8>, // output bytes accumulated so far
    bits: u8,     // number of valid bits currently pending in `bbuf`
    bbuf: u32,    // LSB-first bit accumulator, flushed to `dst` byte by byte
}
1082
impl DeflateWriter {
    /// Creates a new instance of `DeflateWriter` for a provided output.
    pub fn new(dst: Vec<u8>) -> Self {
        Self {
            dst,
            bits: 0,
            bbuf: 0,
        }
    }
    // Pads the pending bit count up to the next byte boundary
    // (the padding bits in `bbuf` are zero).
    fn align(&mut self) {
        if (self.bits & 7) != 0 {
            self.bits += 8 - (self.bits & 7);
        }
    }
    // Moves all complete bytes out of the bit accumulator into the output,
    // leaving at most 7 pending bits.
    fn flush(&mut self) {
        while self.bits >= 8 {
            self.dst.push(self.bbuf as u8);
            self.bbuf >>= 8;
            self.bits -= 8;
        }
    }
    // Appends `len` bits of `val`, LSB first as deflate requires.
    // `flush()` keeps at most 7 pending bits, so 7 + 16 bits always fit
    // into the 32-bit accumulator.
    fn write(&mut self, val: u16, len: u8) {
        self.flush();
        self.bbuf |= u32::from(val) << self.bits;
        self.bits += len;
    }
    /// Finishes writing the stream and returns the output vector.
    pub fn end(mut self) -> Vec<u8> {
        self.flush();
        if self.bits > 0 {
            self.dst.push(self.bbuf as u8);
        }
        self.dst
    }

    // Writes the dynamic-block code definitions (RFC 1951, 3.2.7): both
    // length tables are run-length coded with the 19-symbol code length
    // alphabet, whose own 3-bit lengths are stored in LEN_RECODE order.
    fn write_codes(&mut self, codes: &CodeHuff, dists: &DistHuff) {
        let mut stats = [0u32; 19];
        let mut toks = Vec::with_capacity(NUM_LITERALS + NUM_DISTS);
        let mut cw = [0u16; 19];
        let mut cl = [0u8; 19];
        let mut nc = 0;

        // literal and distance lengths are RLE'd into one token stream
        add_codes(&codes.lens[..codes.num_codes], &mut stats, &mut toks);
        add_codes(&dists.lens[..dists.num_codes], &mut stats, &mut toks);

        // code length codes themselves are limited to 7 bits
        gen_tree(&mut cw, &mut cl, &mut nc, &mut stats, 7);

        // trim unused trailing entries in transmission order; the header
        // field is HCLEN = count - 4, so at least 4 must be sent
        nc = cw.len();
        for &idx in LEN_RECODE.iter().rev() {
            if cl[idx] == 0 {
                nc -= 1;
            } else {
                break;
            }
        }
        if nc < 4 {
            nc = 4;
        }
        self.write((nc - 4) as u16, 4);
        for &idx in LEN_RECODE.iter().take(nc) {
            self.write(u16::from(cl[idx]), 3);
        }
        // emit the RLE tokens plus the extra bits of the repeat symbols
        for &(sym, add) in toks.iter() {
            self.write(cw[sym as usize], cl[sym as usize]);
            match sym {
                16 => self.write(u16::from(add), 2),
                17 => self.write(u16::from(add), 3),
                18 => self.write(u16::from(add), 7),
                _ => {},
            };
        }
    }
    // Writes the token stream with the given literal/length and distance codes;
    // symbols above 256 are matches and carry length + distance information.
    fn write_tokens(&mut self, src: &[Token], codes: &CodeHuff, dists: &DistHuff) {
        for &tok in src.iter() {
            self.write(codes.codes[tok.sym as usize], codes.lens[tok.sym as usize]);
            if tok.sym > 256 {
                self.write_len_bits(tok.len);
                self.write(dists.codes[tok.distsym as usize], dists.lens[tok.distsym as usize]);
                self.write_dist_bits(tok.dist);
            }
        }
    }
    // Writes the extra bits for a match length: lengths 3-10 and 258
    // (llen 0-7 and 255) have none.
    fn write_len_bits(&mut self, len: u16) {
        let llen = len - 3;
        if llen >= 8 && llen < 255 {
            let bits = (16 - llen.leading_zeros() - 3) as u8;
            self.write(llen & ((1 << bits) - 1), bits);
        }
    }
    // Writes the extra bits for a match distance; distances 1-3 have none
    // (dist == 4 computes a zero-bit write, which is a no-op).
    fn write_dist_bits(&mut self, dist: u16) {
        let ddist = dist - 1;
        if dist >= 4 {
            let bits = (16 - ddist.leading_zeros() - 2) as u8;
            self.write(ddist & ((1 << bits) - 1), bits);
        }
    }
}
1180
// Literal/length Huffman code set for one block.
struct CodeHuff {
    is_fixed: bool,              // use the predefined RFC 1951 table instead of building one
    stats: [u32; NUM_LITERALS],  // per-symbol frequencies gathered from the tokens
    codes: [u16; NUM_LITERALS],  // bit-reversed codewords ready for LSB-first writing
    lens: [u8; NUM_LITERALS],    // codeword lengths in bits (0 = unused symbol)
    num_codes: usize,            // number of codes actually transmitted (HLIT + 257)
}
1188
impl CodeHuff {
    fn new(is_fixed: bool) -> Self {
        Self {
            is_fixed,
            stats: [0; NUM_LITERALS],
            codes: [0; NUM_LITERALS],
            lens: [0; NUM_LITERALS],
            num_codes: NUM_LITERALS,
        }
    }
    // Fills in the literal/length codes: either the fixed table from
    // RFC 1951, 3.2.6 or a dynamic tree built from token statistics.
    fn make_codes(&mut self, src: &[Token]) {
        if self.is_fixed {
            // symbols 0-143 -> 8-bit codes 0x30..=0xBF
            for i in 0..=143 {
                self.codes[i] = reverse_bits((i + 0x30) as u32, 8) as u16;
                self.lens[i] = 8;
            }
            // symbols 144-255 -> 9-bit codes 0x190..=0x1FF
            for i in 144..=255 {
                self.codes[i] = reverse_bits((i + 0x100) as u32, 9) as u16;
                self.lens[i] = 9;
            }
            // symbols 256-279 -> 7-bit codes 0x00..=0x17
            for i in 256..=279 {
                self.codes[i] = reverse_bits((i & 0x1F) as u32, 7) as u16;
                self.lens[i] = 7;
            }
            // remaining symbols -> 8-bit codes starting at 0xC0
            for i in 280..NUM_LITERALS {
                self.codes[i] = reverse_bits((i - 280 + 0xC0) as u32, 8) as u16;
                self.lens[i] = 8;
            }
        } else {
            for &tok in src.iter() {
                self.stats[tok.sym as usize] += 1;
            }
            gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
            // the block header cannot signal fewer than 257 literal codes
            if self.num_codes < 257 {
                self.num_codes = 257;
            }
        }
    }
}
1228
// Distance Huffman code set for one block (same layout as CodeHuff,
// but over the 30-symbol distance alphabet).
struct DistHuff {
    is_fixed: bool,           // use the fixed 5-bit codes instead of building a tree
    stats: [u32; NUM_DISTS],  // per-distance-symbol frequencies
    codes: [u16; NUM_DISTS],  // bit-reversed codewords
    lens: [u8; NUM_DISTS],    // codeword lengths in bits (0 = unused)
    num_codes: usize,         // number of codes transmitted (HDIST + 1)
}
1236
1237 impl DistHuff {
1238 fn new(is_fixed: bool) -> Self {
1239 Self {
1240 is_fixed,
1241 stats: [0; NUM_DISTS],
1242 codes: [0; NUM_DISTS],
1243 lens: [0; NUM_DISTS],
1244 num_codes: NUM_DISTS,
1245 }
1246 }
1247 fn make_codes(&mut self, src: &[Token]) {
1248 if self.is_fixed {
1249 for i in 0..NUM_DISTS {
1250 self.codes[i] = reverse_bits(i as u32, 5) as u16;
1251 self.lens[i] = 5;
1252 }
1253 } else {
1254 for &tok in src.iter() {
1255 if tok.sym > 256 {
1256 self.stats[tok.distsym as usize] += 1;
1257 }
1258 }
1259 gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
1260 if self.num_codes < 1 {
1261 self.num_codes = 1;
1262 }
1263 }
1264 }
1265 }
1266
// One slot of the in-array Huffman tree: leaves keep the symbol in `sym`,
// internal nodes use the NODE_SYM marker. `w` holds the weight while the
// tree is being built and the code length after `assign_len()`.
#[derive(Clone,Copy,Default)]
struct Node {
    sym: u16,
    w: u16,
    idx0: u16, // indices of the two children (internal nodes only)
    idx1: u16,
}

// Marker distinguishing internal nodes from real symbols (which are all smaller).
const NODE_SYM: u16 = 65500;

// Scratch array sized for the worst case: leaves plus internal nodes
// of the largest alphabet (the literal one).
struct Tree {
    nodes: [Node; NUM_LITERALS * 2],
    nnodes: usize,
}
1281
impl Tree {
    fn new() -> Self {
        Self {
            nodes: [Node::default(); NUM_LITERALS * 2],
            nnodes: 0,
        }
    }
    // Inserts a node keeping the array sorted by non-decreasing weight
    // (it goes after any node of equal weight).
    fn insert(&mut self, val: Node) {
        let mut idx = self.nnodes;
        for (i, nn) in self.nodes[..self.nnodes].iter().enumerate() {
            if nn.w > val.w {
                idx = i;
                break;
            }
        }
        if idx < self.nnodes {
            for i in (idx..self.nnodes).rev() {
                self.nodes[i + 1] = self.nodes[i];
            }
        }
        self.nodes[idx] = val;
        self.nnodes += 1;
    }
    // Drops the zero-weight (unused) leaves from the front of the sorted list.
    fn trim(&mut self) {
        let mut start = self.nnodes;
        for (i, n) in self.nodes[..self.nnodes].iter().enumerate() {
            if n.w != 0 {
                start = i;
                break;
            }
        }
        if start != 0 {
            for i in 0..self.nnodes - start {
                self.nodes[i] = self.nodes[i + start];
            }
            self.nnodes -= start;
        }
    }
    // Huffman construction over the sorted array: repeatedly combines the two
    // lightest unconsumed nodes into a parent and inserts it by weight.
    // Consumed children get w = 0 and are never moved again, so the parent's
    // idx0/idx1 stay valid (later insertions always land past them).
    fn build(&mut self) {
        // a single used symbol still needs a 1-bit code
        if self.nnodes == 1 {
            self.nodes[0].w = 1;
            return;
        }
        let mut start = 0;
        while start + 1 < self.nnodes {
            let nn1 = self.nodes[start];
            let nn2 = self.nodes[start + 1];
            let n = Node {
                    sym: NODE_SYM,
                    w: nn1.w + nn2.w,
                    idx0: start as u16,
                    idx1: (start + 1) as u16,
                };
            self.nodes[start].w = 0;
            self.nodes[start + 1].w = 0;
            start += 2;
            self.insert(n);
        }
        // the last inserted node is the root; convert weights to depths
        if self.nnodes > 1 {
            self.assign_len(self.nnodes - 1, 0);
        }
    }
    // Walks the tree recursively, storing each leaf's depth (= code length)
    // into its `w` field.
    fn assign_len(&mut self, idx: usize, len: u16) {
        if self.nodes[idx].sym == NODE_SYM {
            self.assign_len(self.nodes[idx].idx0 as usize, len + 1);
            self.assign_len(self.nodes[idx].idx1 as usize, len + 1);
        } else {
            self.nodes[idx].w = len;
        }
    }
}
1353
// Builds canonical Huffman code lengths and (bit-reversed) codewords for the
// given symbol frequencies. The lengths are limited indirectly: frequencies
// are halved until their sum fits into 1 << max_bits.
// NOTE(review): capping the total weight does not strictly guarantee that no
// code exceeds max_bits for adversarial (Fibonacci-like) distributions —
// worth verifying whether a proper length-limiting pass is needed.
fn gen_tree(codes: &mut [u16], lens: &mut [u8], num_codes: &mut usize, stats: &mut [u32], max_bits: u8) {
    let mut tot_w = 0;
    for &w in stats.iter() {
        tot_w += w;
    }
    // no symbols used at all: report an empty code set
    if tot_w == 0 {
        codes[0] = 0;
        lens[0] = 0;
        *num_codes = 0;
        return;
    }
    // scale the weights down, rounding up so used symbols stay non-zero
    while tot_w > (1 << max_bits) {
        for w in stats.iter_mut() {
            *w = (*w + 1) >> 1;
        }
        tot_w = 0;
        for &w in stats.iter() {
            tot_w += w;
        }
    }
    let mut tree = Tree::new();
    for (sym, &w) in stats.iter().enumerate() {
        // the cast is safe: weights were just scaled to fit 1 << max_bits
        tree.insert(Node{ sym: sym as u16, w: w as u16, idx0: 64000, idx1: 64000 });
    }
    tree.trim();
    tree.build();

    // after build() each leaf's `w` holds its code length
    for n in tree.nodes[..tree.nnodes].iter() {
        if n.sym != NODE_SYM {
            lens[n.sym as usize] = n.w as u8;
        }
    }
    lengths_to_codes16(lens, codes);
    // number of codes to transmit = last used symbol + 1
    let mut sz = codes.len();
    for &len in lens.iter().rev() {
        if len != 0 {
            break;
        }
        sz -= 1;
    }
    *num_codes = sz;
}
1396
1397 fn lengths_to_codes16(lens: &[u8], codes: &mut [u16]) {
1398 let mut bits = [0u32; 32];
1399 let mut pfx = [0u32; 33];
1400 for len in lens.iter() {
1401 let len = *len as usize;
1402 bits[len] += 1;
1403 }
1404 bits[0] = 0;
1405 let mut code = 0;
1406 for i in 0..bits.len() {
1407 code = (code + bits[i]) << 1;
1408 pfx[i + 1] = code;
1409 }
1410
1411 for (len, codes) in lens.iter().zip(codes.iter_mut()) {
1412 let len = *len as usize;
1413 if len != 0 {
1414 let bits = len as u8;
1415 *codes = reverse_bits(pfx[len], bits) as u16;
1416 pfx[len] += 1;
1417 } else {
1418 *codes = 0;
1419 }
1420 }
1421 }
1422
// Strategy interface for LZ77 parsing: converts input bytes into a token
// stream (literals and matches) terminated by TOKEN_EOB.
trait LZParse {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>);
}
1426
1427 struct NoParser {}
1428 impl LZParse for NoParser {
1429 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1430 dst.reserve(src.len());
1431 for &b in src.iter() {
1432 dst.push(Token::from_literal(b));
1433 }
1434 dst.push(TOKEN_EOB);
1435 }
1436 }
1437
// Returns the length of the common prefix of the two slices
// (capped by the shorter one).
fn check_match(src1: &[u8], src2: &[u8]) -> u16 {
    src1.iter()
        .zip(src2.iter())
        .take_while(|(a, b)| a == b)
        .count() as u16
}
1448
// Number of buckets in the match-search hash table (12-bit hash).
const HASH_SIZE: usize = 4096;
// Longest match length deflate can encode.
const MAX_MATCH_LEN: usize = 258;
// Back-reference search window, kept MAX_MATCH_LEN under deflate's 32 KiB
// distance limit — presumably so a match starting inside the window cannot
// reach past the limit (TODO confirm the exact intent).
const WINDOW_SIZE: usize = 32768 - MAX_MATCH_LEN;
// "No next entry" sentinel for the hash chains; also the position threshold
// at which MatchFinder slides its window and rebuilds the chains.
const NONEXT: usize = WINDOW_SIZE * 2;
1453
// Hash-chain based searcher for LZ77 matches within the sliding window.
struct MatchFinder<'a> {
    src: &'a [u8],                   // data being searched
    pos: usize,                      // current position within `src`
    hstart: [usize; HASH_SIZE],      // chain head per hash (highest position)
    hend: [usize; HASH_SIZE],        // chain tail per hash (lowest position)
    hnext: [usize; WINDOW_SIZE * 3], // per-position link to the next lower chain entry
}
1461
impl<'a> MatchFinder<'a> {
    // Creates a searcher over `src` with the hash chains prebuilt.
    fn new(src: &'a [u8]) -> Self {
        let mut obj = Self {
                src,
                pos: 0,
                hstart: [0; HASH_SIZE],
                hend: [0; HASH_SIZE],
                hnext: [0; WINDOW_SIZE * 3],
            };
        obj.build_hash();
        obj
    }
    // 12-bit hash of the next three input bytes.
    fn hash(src: &[u8]) -> usize {
        let _ = src[2]; // one bounds check covering all three accesses
        (((u16::from(src[0]) << 10) ^ (u16::from(src[1]) << 5) ^ u16::from(src[2])) & ((HASH_SIZE as u16) - 1)) as usize
    }
    // Rebuilds the chains for up to NONEXT positions starting at `pos`.
    // Positions are visited in reverse, so each chain runs from the highest
    // position (hstart) down to the lowest (hend).
    fn build_hash(&mut self) {
        for el in self.hstart.iter_mut() { *el = NONEXT; }
        for el in self.hend.iter_mut() { *el = NONEXT; }
        for el in self.hnext.iter_mut() { *el = NONEXT; }
        if self.pos + 3 >= self.src.len() {
            return;
        }
        let end = (self.src.len() - 3).min(self.pos + NONEXT);
        for i in (self.pos .. end).rev() {
            let key = Self::hash(&self.src[i..]);
            if self.hstart[key] == NONEXT {
                self.hstart[key] = i;
                self.hend[key] = i;
                // NOTE(review): this looks like it was meant to be hnext[i];
                // it is harmless only because every entry was just reset to NONEXT
                self.hnext[key] = NONEXT;
            } else {
                self.hnext[self.hend[key]] = i;
                self.hend[key] = i;
            }
        }
    }
    // Returns (distance, length) of the longest match within the window for
    // the current position, or (0, 0) if none; stops early once the deflate
    // maximum length is reached.
    fn find_match(&mut self) -> (u16, u16) {
        if self.pos == 0 || self.pos + 3 > self.src.len() {
            return (0, 0);
        }
        let key = Self::hash(&self.src[self.pos..]) as usize;

        let mut best_pos = 0;
        let mut best_len = 0;
        let mut idx = self.hstart[key];
        while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
            // the chain may contain positions at/after `pos`; skip those
            if idx < self.pos {
                let cur_len = check_match(&self.src[self.pos..], &self.src[idx..]);
                if cur_len > best_len {
                    best_len = cur_len;
                    best_pos = self.pos - idx;
                    if best_len >= (MAX_MATCH_LEN as u16) {
                        return (best_pos as u16, MAX_MATCH_LEN as u16);
                    }
                }
            }
            idx = self.hnext[idx];
        }
        (best_pos as u16, best_len)
    }
    // Records, for every achievable match length, the distance of one match
    // of exactly that length (the first chain entry found wins).
    fn find_all_matches(&mut self, dists: &mut [u16; MAX_MATCH_LEN + 1]) {
        if self.pos == 0 || self.pos + 3 > self.src.len() {
            return;
        }
        let key = Self::hash(&self.src[self.pos..]) as usize;
        let mut idx = self.hstart[key];
        while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
            if idx < self.pos {
                let cur_len = (check_match(&self.src[self.pos..], &self.src[idx..]) as usize).min(MAX_MATCH_LEN);
                if cur_len > 0 && dists[cur_len] == 0 {
                    dists[cur_len] = (self.pos - idx) as u16;
                }
            }
            idx = self.hnext[idx];
        }
    }
    // Moves the current position forward: either slides the window and rebuilds
    // the chains (once `pos` passes NONEXT) or just unlinks the chain heads
    // that fell out of the search window.
    fn advance(&mut self, num: usize) {
        self.pos += num;

        if self.pos >= NONEXT {
            let (_, tail) = self.src.split_at(self.pos - WINDOW_SIZE);
            self.src = tail;
            self.pos = WINDOW_SIZE;
            self.build_hash();
        } else {
            for (start, end) in self.hstart.iter_mut().zip(self.hend.iter_mut()) {
                let mut idx = *start;
                while idx != NONEXT && idx + WINDOW_SIZE < self.pos {
                    idx = self.hnext[idx];
                    *start = idx;
                }
                if idx == NONEXT {
                    *end = NONEXT;
                }
            }
        }
    }
    // Current input byte.
    fn get_sym(&self) -> u8 { self.src[self.pos] }
    // True when the whole input has been consumed.
    fn is_end(&self) -> bool { self.pos >= self.src.len() }
}
1562
1563 struct GreedyParser {}
1564 impl LZParse for GreedyParser {
1565 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1566 dst.reserve(src.len());
1567
1568 let mut matcher = MatchFinder::new(src);
1569 while !matcher.is_end() {
1570 let (best_pos, best_len) = matcher.find_match();
1571
1572 if best_len >= 3 {
1573 dst.push(Token::from_match(best_pos, best_len));
1574 matcher.advance(best_len as usize);
1575 } else {
1576 dst.push(Token::from_literal(matcher.get_sym()));
1577 matcher.advance(1);
1578 }
1579 }
1580 dst.push(TOKEN_EOB);
1581 }
1582 }
1583
// Greedy parser with one-symbol lookahead: before committing to a match it
// checks whether starting one byte later yields a sufficiently longer one.
struct LazyParser {}
impl LZParse for LazyParser {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        dst.reserve(src.len());

        let mut matcher = MatchFinder::new(src);
        while !matcher.is_end() {
            let (best_pos, best_len) = matcher.find_match();
            if best_len >= 3 {
                // tentatively consume one literal and retry the search
                let last_sym = matcher.get_sym();
                matcher.advance(1);
                if !matcher.is_end() {
                    let (best_pos1, best_len1) = matcher.find_match();
                    if best_len1 > best_len + 1 {
                        // the deferred match wins: emit the literal plus it
                        dst.push(Token::from_literal(last_sym));
                        dst.push(Token::from_match(best_pos1, best_len1));
                        matcher.advance(best_len1 as usize);
                        continue;
                    }
                }
                // keep the original match; one byte was already consumed above
                dst.push(Token::from_match(best_pos, best_len));
                matcher.advance((best_len - 1) as usize);
            } else {
                dst.push(Token::from_literal(matcher.get_sym()));
                matcher.advance(1);
            }
        }
        dst.push(TOKEN_EOB);
    }
}
1614
// Trellis node for the optimal parser: cheapest known bit cost of reaching
// this position, the distance of a match starting here, and a link to the
// neighbouring position on the best path.
#[derive(Clone,Copy)]
struct TNode {
    price: u32,
    dist: u16,
    link: usize,
}

impl Default for TNode {
    /// An unreached node: infinite price, no match, no predecessor.
    fn default() -> Self {
        Self { price: u32::MAX, dist: 0, link: 0 }
    }
}
1631
1632 struct OptimalParser {
1633 trellis: Vec<TNode>,
1634 }
1635 impl OptimalParser {
1636 fn new() -> Self { Self::default() }
1637 fn sym_price(_sym: u8) -> u32 { 9 }
1638 fn match_price(dist: u16, _len: u16) -> u32 {
1639 if dist <= 4 {
1640 9 + 5
1641 } else {
1642 let bits = 16 - (dist - 1).leading_zeros();
1643 9 + 3 + bits
1644 }
1645 }
1646 }
1647 impl Default for OptimalParser {
1648 fn default() -> Self {
1649 Self {
1650 trellis: Vec::with_capacity(WINDOW_SIZE),
1651 }
1652 }
1653 }
impl LZParse for OptimalParser {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        if src.is_empty() {
            dst.push(TOKEN_EOB);
            return;
        }
        dst.reserve(src.len());

        // trellis[i] holds the cheapest known way (in estimated bits) of
        // encoding src[..i]; node 0 is the empty prefix.
        self.trellis.clear();
        self.trellis.reserve(src.len() + 1);
        for _ in 0..=src.len() {
            self.trellis.push(TNode::default());
        }
        self.trellis[0].price = 0;

        // forward pass: relax one literal edge plus one edge per match length
        let mut matcher = MatchFinder::new(src);
        for i in 0..self.trellis.len() - 1 {
            let mut dists = [0; MAX_MATCH_LEN + 1];
            matcher.find_all_matches(&mut dists);

            let sym = matcher.get_sym();
            let lprice = Self::sym_price(sym) + self.trellis[i].price;
            if self.trellis[i + 1].price > lprice {
                self.trellis[i + 1].price = lprice;
                self.trellis[i + 1].link = i;
            }
            for (len, &dist) in dists.iter().enumerate() {
                if dist != 0 {
                    let mprice = Self::match_price(dist, len as u16) + self.trellis[i].price;
                    if self.trellis[i + len].price > mprice {
                        self.trellis[i + len].price = mprice;
                        self.trellis[i + len].link = i;
                        // the distance of the last improving (i.e. longest)
                        // match is kept; a prefix of that match is a valid
                        // match too, so it works for shorter chosen edges
                        self.trellis[i].dist = dist;
                    }
                }
            }

            matcher.advance(1);
        }
        // backward pass: reverse the predecessor links so they point forward
        let mut idx = self.trellis.len() - 1;
        let mut nidx = self.trellis[idx].link;
        while idx > 0 {
            let oidx = idx;
            idx = nidx;
            nidx = self.trellis[idx].link;
            self.trellis[idx].link = oidx;
        }

        // emit tokens along the cheapest path; an edge of length 1 is a literal
        let mut idx = 0;
        while idx < self.trellis.len() - 1 {
            let len = self.trellis[idx].link - idx;
            if len == 1 {
                dst.push(Token::from_literal(src[idx]));
            } else {
                dst.push(Token::from_match(self.trellis[idx].dist, len as u16));
            }
            idx = self.trellis[idx].link;
        }

        dst.push(TOKEN_EOB);
    }
}
1716
/// Deflate compression mode.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum DeflateMode {
    /// No compression.
    NoCompr,
    /// Fast compression.
    Fast,
    /// Still fast but better compression.
    Better,
    /// Slow but the best compression.
    Best,
}

impl Default for DeflateMode {
    fn default() -> Self { DeflateMode::Better }
}
1733
/// Human-readable description of the deflate compression level option.
pub const DEFLATE_MODE_DESCRIPTION: &str = "Deflate compression level.";
/// Deflate option for no compression.
pub const DEFLATE_MODE_NONE: &str = "none";
/// Deflate option for fast compression.
pub const DEFLATE_MODE_FAST: &str = "fast";
/// Deflate option for better compression.
pub const DEFLATE_MODE_BETTER: &str = "better";
/// Deflate option for best compression.
pub const DEFLATE_MODE_BEST: &str = "best";

/// All possible option values for deflate compression.
pub const DEFLATE_OPTION_VALUES: NAOptionDefinitionType = NAOptionDefinitionType::String(Some(&[DEFLATE_MODE_NONE, DEFLATE_MODE_FAST, DEFLATE_MODE_BETTER, DEFLATE_MODE_BEST]));
1746
1747 impl std::str::FromStr for DeflateMode {
1748 type Err = ();
1749
1750 fn from_str(s: &str) -> Result<Self, Self::Err> {
1751 match s {
1752 DEFLATE_MODE_NONE => Ok(DeflateMode::NoCompr),
1753 DEFLATE_MODE_FAST => Ok(DeflateMode::Fast),
1754 DEFLATE_MODE_BETTER => Ok(DeflateMode::Better),
1755 DEFLATE_MODE_BEST => Ok(DeflateMode::Best),
1756 _ => Err(()),
1757 }
1758 }
1759 }
1760
1761 impl ToString for DeflateMode {
1762 fn to_string(&self) -> String {
1763 match *self {
1764 DeflateMode::NoCompr => DEFLATE_MODE_NONE.to_string(),
1765 DeflateMode::Fast => DEFLATE_MODE_FAST.to_string(),
1766 DeflateMode::Better => DEFLATE_MODE_BETTER.to_string(),
1767 DeflateMode::Best => DEFLATE_MODE_BEST.to_string(),
1768 }
1769 }
1770 }
1771
// Internal block coding mode corresponding to the public DeflateMode.
#[derive(Clone,Copy,Debug,PartialEq)]
enum Mode {
    Copy,    // stored (uncompressed) blocks
    Fixed,   // fixed Huffman tables
    Dynamic, // per-block dynamic Huffman tables
}

// Amount of data buffered before a block is emitted; also the largest
// LEN a stored block can carry (16-bit field).
const MAX_BLOCK_SIZE: usize = 65535;
1780
/// Deflate stream compressor.
pub struct Deflate {
    mode: Mode,                      // block coding mode derived from DeflateMode
    tokens: Vec<Token>,              // reusable token buffer for the current block
    srcbuf: [u8; MAX_BLOCK_SIZE],    // input staging buffer (one block's worth)
    ssize: usize,                    // number of valid bytes in `srcbuf`
    sum1: u32,                       // Adler-32 low word (starts at 1)
    sum2: u32,                       // Adler-32 high word
    zlib_mode: bool,                 // set once a zlib header was written; adds the footer
    parser: Box<dyn LZParse + Send>, // LZ77 parsing strategy
}
1792
impl Deflate {
    /// Creates a new instance of `Deflate`.
    pub fn new(mode: DeflateMode) -> Self {
        // pick the block coding mode and the matching LZ77 parser
        let (mode, parser) = match mode {
                DeflateMode::NoCompr => (Mode::Copy, Box::new(NoParser{}) as Box<dyn LZParse + Send>),
                DeflateMode::Fast => (Mode::Fixed, Box::new(GreedyParser{}) as Box<dyn LZParse + Send>),
                DeflateMode::Better => (Mode::Dynamic, Box::new(LazyParser{}) as Box<dyn LZParse + Send>),
                DeflateMode::Best => (Mode::Dynamic, Box::new(OptimalParser::new()) as Box<dyn LZParse + Send>),
            };
        Self {
            mode, parser,
            tokens: Vec::with_capacity(MAX_BLOCK_SIZE),
            srcbuf: [0; MAX_BLOCK_SIZE],
            ssize: 0,
            sum1: 1, // Adler-32 state starts as 1
            sum2: 0,
            zlib_mode: false,
        }
    }
    /// Writes zlib stream header.
    pub fn write_zlib_header(&mut self, wr: &mut DeflateWriter) {
        wr.write(8, 4); // CM = 8 (deflate)
        wr.write(7, 4); // CINFO = 7 (32 KiB window)
        // FLG byte: level hint plus check bits making the header
        // divisible by 31 (RFC 1950)
        let level = match self.mode {
                Mode::Copy => 0x01,
                Mode::Fixed => 0x5E,
                Mode::Dynamic => 0x9C,
                // 0xDA for the strongest one
            };
        wr.write(level, 8);
        self.zlib_mode = true;
    }
    // Appends the big-endian Adler-32 checksum after the last block.
    fn write_zlib_footer(&self, wr: &mut DeflateWriter) {
        wr.align();
        wr.write((self.sum2 >> 8) as u16, 8);
        wr.write((self.sum2 & 0xFF) as u16, 8);
        wr.write((self.sum1 >> 8) as u16, 8);
        wr.write((self.sum1 & 0xFF) as u16, 8);
    }
    /// Queues data for compression.
    ///
    /// The data might be not actually compressed until [`compress_end`] is called.
    ///
    /// [`compress_end`]: ./struct.Deflate.html#method.compress_end
    pub fn compress(&mut self, src: &[u8], wr: &mut DeflateWriter) {
        let mut src = src;
        while !src.is_empty() {
            // stage the input and emit a block whenever the buffer fills up
            let clen = src.len().min(MAX_BLOCK_SIZE - self.ssize);
            let (head, tail) = src.split_at(clen);
            src = tail;
            self.srcbuf[self.ssize..][..clen].copy_from_slice(head);
            self.ssize += clen;
            if self.ssize == MAX_BLOCK_SIZE {
                self.do_block(wr, false);
            }
        }
    }
    /// Tells the encoder to finish data compression.
    ///
    /// Complete data will be output after this call.
    pub fn compress_end(&mut self, wr: &mut DeflateWriter) {
        if self.ssize > 0 {
            self.do_block(wr, true);
        } else {
            // nothing pending: emit an empty final fixed-code block
            wr.write(1, 1);
            wr.write(1, 2);
            wr.write(0, 7); //static EOF sym
        }
        if self.zlib_mode {
            self.write_zlib_footer(wr);
        }
    }
    /// Tells the encoder to compress the data it received and flush it.
    pub fn compress_flush(&mut self, wr: &mut DeflateWriter) {
        if self.ssize > 0 {
            self.do_block(wr, false);
        }
        if (wr.bits & 7) != 0 {
            // write zero-length copy block for byte-alignment
            wr.write(0, 1);
            wr.write(0, 2);
            wr.align();
            wr.write(0, 16);
            wr.write(0xFFFF, 16);
        }
    }
    // Encodes the staged input as one deflate block and resets the buffer.
    fn do_block(&mut self, wr: &mut DeflateWriter, final_block: bool) {
        // Adler-32 modulus (despite the name this is not a CRC)
        const CRC_BASE: u32 = 65521;
        // update the running checksum; one conditional subtraction suffices
        // since each addend stays below the modulus
        for &b in self.srcbuf[..self.ssize].iter() {
            self.sum1 += u32::from(b);
            if self.sum1 >= CRC_BASE {
                self.sum1 -= CRC_BASE;
            }
            self.sum2 += self.sum1;
            if self.sum2 >= CRC_BASE {
                self.sum2 -= CRC_BASE;
            }
        }
        match self.mode {
            Mode::Copy => {
                // stored block: BFINAL, BTYPE=00, LEN and its complement, raw bytes
                wr.write(final_block as u16, 1);
                wr.write(0, 2);
                wr.align();
                wr.write(self.ssize as u16, 16);
                wr.write(!self.ssize as u16, 16);
                for &b in self.srcbuf[..self.ssize].iter() {
                    wr.write(u16::from(b), 8);
                }
            },
            Mode::Fixed => {
                // BTYPE=01: tokens coded with the predefined tables
                wr.write(final_block as u16, 1);
                wr.write(1, 2);
                self.tokens.clear();
                self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
                let mut codes = CodeHuff::new(true);
                codes.make_codes(&self.tokens);
                let mut dists = DistHuff::new(true);
                dists.make_codes(&self.tokens);
                wr.write_tokens(&self.tokens, &codes, &dists);
            },
            Mode::Dynamic => {
                // BTYPE=10: build per-block tables, transmit them, then the tokens
                wr.write(final_block as u16, 1);
                wr.write(2, 2);
                self.tokens.clear();
                self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
                let mut codes = CodeHuff::new(false);
                codes.make_codes(&self.tokens);
                let mut dists = DistHuff::new(false);
                dists.make_codes(&self.tokens);
                wr.write((codes.num_codes - 257) as u16, 5);
                wr.write((dists.num_codes - 1) as u16, 5);
                wr.write_codes(&codes, &dists);
                wr.write_tokens(&self.tokens, &codes, &dists);
            },
        }
        self.ssize = 0;
    }
}
1931
#[cfg(test)]
mod test {
    use super::*;

    // Decompresses a small raw deflate stream in one call.
    #[test]
    fn test_inflate1() {
        const TEST_DATA: &[u8] = &[
            0xF3, 0x48, 0xCD, 0xC9, 0xC9, 0xD7, 0x51, 0x28,
            0xCF, 0x2F, 0xCA, 0x49, 0x51, 0x04, 0x00 ];
        const TEST_REF: &[u8] = b"Hello, world!";
        let mut dst_buf = [0u8; 13];
        let len = Inflate::uncompress(TEST_DATA, &mut dst_buf).unwrap();
        assert_eq!(len, 13);
        for i in 0..len {
            assert_eq!(dst_buf[i], TEST_REF[i]);
        }
    }
    // Streaming decompression: 3-byte input chunks into a 7-byte output
    // buffer, exercising both the ShortData and the OutputFull paths.
    #[test]
    fn test_inflate2() {
        const TEST_DATA3: &[u8] = &[ 0x4B, 0x4C, 0x44, 0x80, 0x24, 0x54, 0x80, 0x2C, 0x06, 0x00 ];
        const TEST_REF3: &[u8] = b"aaaaaaaaaaaabbbbbbbbbbbbbbbaaaaabbbbbbb";
        let mut dst_buf = [0u8; 39];

        let mut inflate = Inflate::new();
        let mut output_chunk = [0u8; 7];
        let mut output_pos = 0;
        for input in TEST_DATA3.chunks(3) {
            let mut repeat = false;
            loop {
                let ret = inflate.decompress_data(input, &mut output_chunk, repeat);
                match ret {
                    Ok(len) => {
                        for i in 0..len {
                            dst_buf[output_pos + i] = output_chunk[i];
                        }
                        output_pos += len;
                        break;
                    },
                    Err(DecompressError::ShortData) => {
                        break;
                    },
                    Err(DecompressError::OutputFull) => {
                        repeat = true;
                        for i in 0..output_chunk.len() {
                            dst_buf[output_pos + i] = output_chunk[i];
                        }
                        output_pos += output_chunk.len();
                    },
                    _ => {
                        panic!("decompress error {:?}", ret.err().unwrap());
                    },
                }
            }
        }

        assert_eq!(output_pos, dst_buf.len());
        for i in 0..output_pos {
            assert_eq!(dst_buf[i], TEST_REF3[i]);
        }
    }
    // Decodes a complete gzip (RFC 1952) file.
    #[test]
    fn test_inflate3() {
        const TEST_DATA: &[u8] = &[
            0x1F, 0x8B, 0x08, 0x08, 0xF6, 0x7B, 0x90, 0x5E, 0x02, 0x03, 0x31, 0x2E, 0x74, 0x78, 0x74, 0x00,
            0xE5, 0x95, 0x4B, 0x4E, 0xC3, 0x30, 0x10, 0x40, 0xF7, 0x39, 0xC5, 0x1C, 0x00, 0x16, 0x70, 0x83,
            0x0A, 0xB5, 0x3B, 0xE8, 0x82, 0x5E, 0x60, 0x1A, 0x4F, 0xE2, 0x11, 0xFE, 0x44, 0x1E, 0xA7, 0x69,
            0x6E, 0xCF, 0x38, 0xDD, 0xB0, 0x40, 0xA2, 0x46, 0x2D, 0x20, 0x2A, 0xE5, 0xAB, 0xCC, 0xE7, 0xBD,
            0x49, 0xAC, 0x6C, 0x03, 0x64, 0x4B, 0xD0, 0x71, 0x92, 0x0C, 0x06, 0x67, 0x88, 0x1D, 0x3C, 0xD9,
            0xC4, 0x92, 0x3D, 0x4A, 0xF3, 0x3C, 0x43, 0x4E, 0x23, 0x81, 0x8B, 0x07, 0x82, 0x1E, 0xF5, 0x90,
            0x23, 0x78, 0x6A, 0x56, 0x30, 0x60, 0xCA, 0x89, 0x4D, 0x4F, 0xC0, 0x01, 0x10, 0x06, 0xC2, 0xA4,
            0xA1, 0x44, 0xCD, 0xF6, 0x54, 0x50, 0xA8, 0x8D, 0xC1, 0x9C, 0x5F, 0x71, 0x37, 0x45, 0xC8, 0x63,
            0xCA, 0x8E, 0xC0, 0xE8, 0x23, 0x69, 0x56, 0x9A, 0x8D, 0x5F, 0xB6, 0xC9, 0x96, 0x53, 0x4D, 0x17,
            0xAB, 0xB9, 0xB0, 0x49, 0x14, 0x5A, 0x0B, 0x96, 0x82, 0x7C, 0xB7, 0x6F, 0x17, 0x35, 0xC7, 0x9E,
            0xDF, 0x78, 0xA3, 0xF1, 0xD0, 0xA2, 0x73, 0x1C, 0x7A, 0xD8, 0x2B, 0xB3, 0x5C, 0x90, 0x85, 0xBB,
            0x2A, 0x14, 0x2E, 0xF7, 0xD1, 0x19, 0x48, 0x0A, 0x23, 0x57, 0x45, 0x13, 0x3E, 0xD6, 0xA0, 0xBD,
            0xF2, 0x11, 0x7A, 0x22, 0x21, 0xAD, 0xE5, 0x70, 0x56, 0xA0, 0x9F, 0xA5, 0xA5, 0x03, 0x85, 0x2A,
            0xDE, 0x92, 0x00, 0x32, 0x61, 0x10, 0xAD, 0x27, 0x13, 0x7B, 0x5F, 0x98, 0x7F, 0x59, 0x83, 0xB8,
            0xB7, 0x35, 0x16, 0xEB, 0x12, 0x0F, 0x1E, 0xD9, 0x14, 0x0B, 0xCF, 0xEE, 0x6D, 0x91, 0xF8, 0x93,
            0x6E, 0x81, 0x3F, 0x7F, 0x41, 0xA4, 0x22, 0x1F, 0xB7, 0xE6, 0x85, 0x83, 0x9A, 0xA2, 0x61, 0x12,
            0x0D, 0x0F, 0x6D, 0x01, 0xBD, 0xB0, 0xE8, 0x1D, 0xEC, 0xD1, 0xA0, 0xBF, 0x1F, 0x4E, 0xFB, 0x55,
            0xBD, 0x73, 0xDD, 0x87, 0xB9, 0x53, 0x23, 0x17, 0xD3, 0xE2, 0xE9, 0x08, 0x87, 0x42, 0xFF, 0xCF,
            0x26, 0x42, 0xAE, 0x76, 0xB5, 0xAE, 0x97, 0x0C, 0x18, 0x78, 0xA0, 0x24, 0xE5, 0x54, 0x0C, 0x6E,
            0x60, 0x52, 0x79, 0x22, 0x57, 0xF5, 0x87, 0x78, 0x78, 0x04, 0x93, 0x46, 0xEF, 0xCB, 0x98, 0x96,
            0x8B, 0x65, 0x00, 0xB7, 0x36, 0xBD, 0x77, 0xA8, 0xBD, 0x5A, 0xAA, 0x1A, 0x09, 0x00, 0x00
        ];

        let mut mr = MemoryReader::new_read(TEST_DATA);
        let mut br = ByteReader::new(&mut mr);
        let _dst_buf = gzip_decode(&mut br, false).unwrap();

        // println!("{}", String::from_utf8_lossy(_dst_buf.as_slice()));
    }
    // Checks a stored (uncompressed) block plus the zlib Adler-32 trailer
    // against a known-good byte sequence.
    #[test]
    fn test_deflate_crc() {
        let output = Vec::with_capacity(20);
        let mut writer = DeflateWriter::new(output);
        let mut compr = Deflate::new(DeflateMode::NoCompr);
        compr.write_zlib_header(&mut writer);
        compr.compress(b"Hello, world!", &mut writer);
        compr.compress_end(&mut writer);
        let output = writer.end();
        assert_eq!(output.as_slice(), b"\x78\x01\x01\x0D\x00\xF2\xFFHello, world!\x20\x5E\x04\x8A");
    }
    // Round-trip helper: compress with the given mode, inflate the result
    // and compare against the source.
    fn deflate_test(mode: DeflateMode) {
        const SRC: &[u8] =
b"The first day of Christmas,
My true love sent to me
A partridge in a pear tree.

The second day of Christmas,
My true love sent to me
Two turtle doves, and
A partridge in a pear tree.

The third day of Christmas,
My true love sent to me
Three French hens,
Two turtle doves, and
A partridge in a pear tree.

The fourth day of Christmas,
My true love sent to me
Four colly birds,
Three French hens,
Two turtle doves, and
A partridge in a pear tree.

The fifth day of Christmas,
My true love sent to me
Five gold rings,
Four colly birds,
Three French hens,
Two turtle doves, and
A partridge in a pear tree.";
        let output = Vec::with_capacity(SRC.len() + 16);
        let mut writer = DeflateWriter::new(output);
        let mut compr = Deflate::new(mode);
        compr.write_zlib_header(&mut writer);
        compr.compress(SRC, &mut writer);
        compr.compress_end(&mut writer);
        let output = writer.end();
        let mut uncompr = vec![0u8; SRC.len()];
        Inflate::uncompress(&output, &mut uncompr).unwrap();
        assert_eq!(SRC, uncompr.as_slice());
    }
    #[test]
    fn test_deflate_fast() {
        deflate_test(DeflateMode::Fast);
    }
    #[test]
    fn test_deflate_better() {
        deflate_test(DeflateMode::Better);
    }
    #[test]
    fn test_deflate_best() {
        deflate_test(DeflateMode::Best);
    }
}