2509fc229116d5376a8aff2d65e1e7182079507b
[nihav.git] / nihav-core / src / compr / deflate.rs
1 //! Deflate format (RFC 1951) support.
2 //!
3 //! This module provides functionality for decompressing raw deflated streams via [`Inflate`] and gzip files (RFC 1952) via [`gzip_decode`] and compressing raw or zlib streams via [`Deflate`].
4 //!
5 //! [`Deflate`]: ./struct.Deflate.html
6 //! [`Inflate`]: ./struct.Inflate.html
7 //! [`gzip_decode`]: ./fn.gzip_decode.html
8 //!
9 //! # Examples
10 //!
11 //! Decompressing full input buffer into sufficiently large output buffer:
12 //! ```
13 //! # use nihav_core::compr::DecompressError;
14 //! use nihav_core::compr::deflate::Inflate;
15 //!
16 //! # fn decompress(input: &[u8]) -> Result<(), DecompressError> {
17 //! # let mut output_buffer = [0u8; 16];
18 //! let output_length = Inflate::uncompress(input, &mut output_buffer)?;
19 //! # Ok(())
20 //! # }
21 //! ```
22 //!
23 //! Decompressing input chunks into portions of output:
24 //! ```
25 //! use nihav_core::compr::DecompressError;
26 //! use nihav_core::compr::deflate::Inflate;
27 //!
28 //! # fn decompress(input_data: &[u8]) -> Result<(), DecompressError> {
29 //! let mut inflate = Inflate::new();
30 //! let mut dst_buf: Vec<u8> = Vec::new();
31 //! let mut output_chunk = [0u8; 1024];
32 //! for src in input_data.chunks(512) {
33 //! let mut repeat = false;
34 //! loop {
35 //! let ret = inflate.decompress_data(src, &mut output_chunk, repeat);
36 //! match ret {
37 //! Ok(len) => { // we got a buffer decoded successfully to the end
38 //! dst_buf.extend_from_slice(&output_chunk[..len]);
39 //! break;
40 //! },
41 //! Err(DecompressError::ShortData) => { // this block of data was fully read
42 //! break;
43 //! },
44 //! Err(DecompressError::OutputFull) => {
45 //! // the output buffer is full, flush it and continue decoding the same block
46 //! repeat = true;
47 //! dst_buf.extend_from_slice(&output_chunk);
48 //! },
49 //! Err(err) => {
50 //! return Err(err);
51 //! },
52 //! }
53 //! }
54 //! }
55 //! # Ok(())
56 //! # }
57 //! ```
58 //!
59 //! Compressing input buffer into zlib stream:
60 //! ```
61 //! use nihav_core::compr::deflate::{Deflate, DeflateMode, DeflateWriter};
62 //!
63 //! # fn compress(input: &[u8]) {
64 //! let output = Vec::with_capacity(input.len() * 65540 / 65535 + 6);
65 //! let mut writer = DeflateWriter::new(output);
66 //! let mut compr = Deflate::new(DeflateMode::Fast);
67 //! compr.write_zlib_header(&mut writer);
68 //! compr.compress(input, &mut writer);
69 //! compr.compress_end(&mut writer);
70 //! let output = writer.end();
71 //! # }
72 //! ```
73
74 use crate::options::NAOptionDefinitionType;
75 use crate::io::byteio::*;
76 use crate::io::bitreader::*;
77 use crate::io::codebook::*;
78 use super::*;
79
// Size of the literal/length code table used by the decoder (symbols 0..=286 plus one spare).
const NUM_LITERALS: usize = 287;
// Size of the distance code table (RFC 1951 defines 30 used distance codes out of 32).
const NUM_DISTS: usize = 32;
82
83 struct FixedLenCodeReader {}
84
85 impl CodebookDescReader<u16> for FixedLenCodeReader {
86 fn bits(&mut self, idx: usize) -> u8 {
87 if idx < 144 { 8 }
88 else if idx < 256 { 9 }
89 else if idx < 280 { 7 }
90 else { 8 }
91 }
92 #[allow(clippy::identity_op)]
93 fn code(&mut self, idx: usize) -> u32 {
94 let base = idx as u32;
95 let bits = self.bits(idx);
96 if idx < 144 { reverse_bits(base + 0x30, bits) }
97 else if idx < 256 { reverse_bits(base + 0x190 - 144, bits) }
98 else if idx < 280 { reverse_bits(base + 0x000 - 256, bits) }
99 else { reverse_bits(base + 0xC0 - 280, bits) }
100 }
101 fn sym (&mut self, idx: usize) -> u16 { idx as u16 }
102 fn len(&mut self) -> usize { NUM_LITERALS + 1 }
103 }
104
/// Saved bit reader position so decoding can be suspended and resumed across input chunks.
#[derive(Clone,Copy,Default)]
struct BitReaderState {
    pos: usize,    // byte offset inside the current input chunk
    bitbuf: u32,   // LSB-first bit cache
    bits: u8,      // number of valid bits currently in `bitbuf`
}

/// LSB-first bit reader over the current input chunk, restartable from a saved state.
struct CurrentSource<'a> {
    src: &'a [u8],
    br: BitReaderState,
}
116
impl<'a> CurrentSource<'a> {
    /// Starts reading a new input chunk: keeps the bits cached in `br` from the
    /// previous chunk but rewinds the byte position to the start of `src`.
    fn new(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
        newsrc.br.pos = 0;
        newsrc.refill();
        newsrc
    }
    /// Resumes reading the same chunk from a saved state (byte position kept).
    fn reinit(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
        newsrc.refill();
        newsrc
    }
    /// Tops up the 32-bit cache with whole bytes, LSB-first.
    fn refill(&mut self) {
        while (self.br.bits <= 24) && (self.br.pos < self.src.len()) {
            self.br.bitbuf |= u32::from(self.src[self.br.pos]) << self.br.bits;
            self.br.bits += 8;
            self.br.pos += 1;
        }
    }
    /// Drops `nbits` consumed bits from the cache; the caller must ensure
    /// the cache holds at least that many bits.
    fn skip_cache(&mut self, nbits: u8) {
        self.br.bitbuf >>= nbits;
        self.br.bits -= nbits;
    }
    /// Reads up to 16 bits, refilling from the source when necessary.
    fn read(&mut self, nbits: u8) -> BitReaderResult<u32> {
        if nbits == 0 { return Ok(0); }
        if nbits > 16 { return Err(BitReaderError::TooManyBitsRequested); }
        if self.br.bits < nbits {
            self.refill();
            if self.br.bits < nbits { return Err(BitReaderError::BitstreamEnd); }
        }
        let ret = self.br.bitbuf & ((1 << nbits) - 1);
        self.skip_cache(nbits);
        Ok(ret)
    }
    /// Reads a single bit as a boolean.
    fn read_bool(&mut self) -> BitReaderResult<bool> {
        if self.br.bits == 0 {
            self.refill();
            if self.br.bits == 0 { return Err(BitReaderError::BitstreamEnd); }
        }
        let ret = (self.br.bitbuf & 1) != 0;
        self.skip_cache(1);
        Ok(ret)
    }
    /// Peeks up to 16 bits without consuming them; returns whatever bits are
    /// available (possibly fewer than requested near the end of input).
    fn peek(&mut self, nbits: u8) -> u32 {
        if nbits == 0 || nbits > 16 { return 0; }
        if self.br.bits < nbits {
            self.refill();
        }
        self.br.bitbuf & ((1 << nbits) - 1)
    }
    /// Skips bits that are already cached; callers must check availability first
    /// (violations indicate a decoder bug, hence the `unreachable!`).
    fn skip(&mut self, nbits: u32) -> BitReaderResult<()> {
        if u32::from(self.br.bits) >= nbits {
            self.skip_cache(nbits as u8);
        } else {
            unreachable!();
        }
        Ok(())
    }
    /// Discards bits up to the next byte boundary (used for stored blocks).
    fn align(&mut self) {
        let b = self.br.bits & 7;
        if b != 0 {
            self.skip_cache(b);
        }
    }
    /// Number of bits still available (unread source bytes plus cached bits).
    fn left(&self) -> isize {
        ((self.src.len() as isize) - (self.br.pos as isize)) * 8 + (self.br.bits as isize)
    }
}
185
impl<'a, S: Copy> CodebookReader<S> for CurrentSource<'a> {
    /// Decodes one Huffman symbol using a multi-level LUT codebook.
    ///
    /// If the input runs out in the middle of a symbol the reader state is
    /// rolled back to where the symbol started and `MemoryError` is returned,
    /// so the caller can retry once more input arrives (the `read_cb!` macro
    /// maps this to `DecompressError::ShortData`).
    fn read_cb(&mut self, cb: &Codebook<S>) -> CodebookResult<S> {
        let mut esc = true;
        let mut idx = 0;
        let mut lut_bits = cb.lut_bits;
        let orig_br = self.br; // saved for rollback on input underrun
        while esc {
            let lut_idx = (self.peek(lut_bits) as usize) + (idx as usize);
            if cb.table[lut_idx] == TABLE_FILL_VALUE { return Err(CodebookError::InvalidCode); }
            let bits = cb.table[lut_idx] & 0x7F;
            esc  = (cb.table[lut_idx] & 0x80) != 0;
            idx  = (cb.table[lut_idx] >> 8) as usize;
            // escape entries consume the full LUT index, final entries only the code length
            let skip_bits = if esc { u32::from(lut_bits) } else { bits };
            if (skip_bits as isize) > self.left() {
                self.br = orig_br;
                self.refill();
                return Err(CodebookError::MemoryError);
            }
            self.skip(skip_bits as u32).unwrap();
            lut_bits = bits as u8;
        }
        Ok(cb.syms[idx])
    }
}
210
/// Decoder state machine. Each variant carries enough context for decoding
/// to be suspended (short input or full output) and resumed later.
enum InflateState {
    Start,
    BlockStart,
    BlockMode,
    // stored (uncompressed) block: LEN, NLEN, then a raw byte copy
    StaticBlockLen,
    StaticBlockInvLen(u32),
    StaticBlockCopy(usize),
    // block compressed with the fixed Huffman codes
    FixedBlock,
    FixedBlockLengthExt(usize, u8),
    FixedBlockDist(usize),
    FixedBlockDistExt(usize, usize, u8),
    FixedBlockCopy(usize, usize),
    FixedBlockLiteral(u8),
    // dynamic block header: HLIT, HDIST, HCLEN and the transmitted code lengths
    DynBlockHlit,
    DynBlockHdist,
    DynBlockHclen,
    DynLengths(usize),
    DynCodeLengths,
    DynCodeLengthsAdd(usize),
    // block compressed with the dynamically transmitted Huffman codes
    DynBlock,
    DynBlockLengthExt(usize, u8),
    DynBlockDist(usize),
    DynBlockDistExt(usize, usize, u8),
    DynCopy(usize, usize),
    DynBlockLiteral(u8),
    End,
}
238
/// The decompressor for deflated streams (RFC 1951).
pub struct Inflate {
    br: BitReaderState,           // bit reader state saved between calls
    fix_len_cb: Codebook<u16>,    // fixed literal/length codebook (same for all streams)

    buf: [u8; 65536],             // 64 KiB LZ77 history ring buffer (power-of-two size)
    bpos: usize,                  // current write position inside `buf` (wraps)
    output_idx: usize,            // bytes written to `dst` during the current run
    full_pos: usize,              // total bytes decoded since the stream started

    state: InflateState,
    final_block: bool,            // the current block carries the BFINAL flag
    hlit: usize,                  // number of literal/length codes in the dynamic block
    hdist: usize,                 // number of distance codes in the dynamic block
    dyn_len_cb: Option<Codebook<u32>>,   // code-length code of the dynamic header
    dyn_lit_cb: Option<Codebook<u32>>,   // literal/length code of the dynamic block
    dyn_dist_cb: Option<Codebook<u32>>,  // distance code of the dynamic block
    len_lengths: [u8; 19],        // lengths for the code-length alphabet
    all_lengths: [u8; NUM_LITERALS + NUM_DISTS],  // combined literal + distance lengths
    cur_len_idx: usize,           // progress index while reading code lengths
}
260
// Extra bits for length symbols 257..=285 (RFC 1951, section 3.2.5).
const LENGTH_ADD_BITS: [u8; 29] = [
    0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
    1, 1, 2, 2, 2, 2, 3, 3, 3, 3,
    4, 4, 4, 4, 5, 5, 5, 5, 0
];
// Base match length for length symbols 257..=285.
const LENGTH_BASE: [u16; 29] = [
    3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
    15, 17, 19, 23, 27, 31, 35, 43, 51, 59,
    67, 83, 99, 115, 131, 163, 195, 227, 258
];
// Extra bits for distance symbols 0..=29.
const DIST_ADD_BITS: [u8; 30] = [
    0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
    4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
    9, 9, 10, 10, 11, 11, 12, 12, 13, 13
];
// Base match distance for distance symbols 0..=29.
const DIST_BASE: [u16; 30] = [
    1, 2, 3, 4, 5, 7, 9, 13, 17, 25,
    33, 49, 65, 97, 129, 193, 257, 385, 513, 769,
    1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
];
// Transmission order of the code-length code lengths (RFC 1951, section 3.2.7).
const LEN_RECODE: [usize; 19] = [
    16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
];
// Extra bits and base run lengths for the repeat codes 16, 17 and 18.
const REPEAT_BITS: [u8; 3] = [ 2, 3, 7 ];
const REPEAT_BASE: [u8; 3] = [ 3, 3, 11 ];
286
// Reads `$bits` bits from `$csrc`, or saves the reader state into `$self.br`
// and bails out with `ShortData` when the input chunk has too few bits left
// (so decoding can resume with the next chunk).
macro_rules! read_bits {
    ($self: expr, $csrc: expr, $bits: expr) => ({
        if $csrc.left() < $bits as isize {
            $self.br = $csrc.br;
            return Err(DecompressError::ShortData);
        }
        $csrc.read($bits).unwrap()
    })
}
296
// Decodes one codebook symbol from `$csrc`. An input underrun (`MemoryError`)
// saves the reader state and returns `ShortData` so decoding can resume later;
// any other codebook error terminates the stream with `InvalidData`.
macro_rules! read_cb {
    ($self: expr, $csrc: expr, $cb: expr) => ({
        let ret = $csrc.read_cb($cb);
        if let Err(CodebookError::MemoryError) = ret {
            $self.br = $csrc.br;
            return Err(DecompressError::ShortData);
        }
        match ret {
            Ok(val) => val,
            Err(_) => {
                $self.state = InflateState::End;
                return Err(DecompressError::InvalidData);
            },
        }
    })
}
313
314 impl Inflate {
    /// Creates a new instance of `Inflate` struct.
    pub fn new() -> Self {
        let mut cr = FixedLenCodeReader {};
        // the fixed literal/length codebook never changes, build it once up front
        let fix_len_cb = Codebook::new(&mut cr, CodebookMode::LSB).unwrap();
        Self {
            br: BitReaderState::default(),
            fix_len_cb,

            buf: [0; 65536],
            bpos: 0,
            output_idx: 0,
            full_pos: 0,

            state: InflateState::Start,
            final_block: false,
            dyn_len_cb: None,
            dyn_lit_cb: None,
            dyn_dist_cb: None,
            hlit: 0,
            hdist: 0,
            len_lengths: [0; 19],
            all_lengths: [0; NUM_LITERALS + NUM_DISTS],
            cur_len_idx: 0,
        }
    }
    /// Appends one decoded byte to the history ring buffer.
    fn put_literal(&mut self, val: u8) {
        self.buf[self.bpos] = val;
        // `buf` has a power-of-two size, so masking implements the wrap-around
        self.bpos = (self.bpos + 1) & (self.buf.len() - 1);
        self.full_pos += 1;
    }
    /// Copies `len` bytes starting `offset` bytes back in the history buffer,
    /// appending them to the history and writing them into `dst` as well.
    ///
    /// The copy is done byte by byte because source and destination ranges may
    /// overlap (RFC 1951 allows a match length greater than its distance).
    fn lz_copy(&mut self, offset: usize, len: usize, dst: &mut [u8]) -> DecompressResult<()> {
        let mask = self.buf.len() - 1;
        if offset > self.full_pos {
            // the match references data before the start of the stream
            return Err(DecompressError::InvalidData);
        }
        let cstart = (self.bpos.wrapping_sub(offset)) & mask;
        for i in 0..len {
            self.buf[(self.bpos + i) & mask] = self.buf[(cstart + i) & mask];
            dst[i] = self.buf[(cstart + i) & mask];
        }
        self.bpos = (self.bpos + len) & mask;
        self.full_pos += len;
        Ok(())
    }
359 ///! Reports whether decoder has finished decoding the input.
360 pub fn is_finished(&self) -> bool {
361 match self.state {
362 InflateState::End => true,
363 _ => false,
364 }
365 }
366 ///! Reports the current amount of bytes output into the destination buffer after the last run.
367 pub fn get_current_output_size(&self) -> usize { self.output_idx }
368 ///! Reports the total amount of bytes decoded so far.
369 pub fn get_total_output_size(&self) -> usize { self.bpos }
    /// Tries to decompress input data and write it to the output buffer.
    ///
    /// Since the decompressor can work with arbitrary input and output chunks its return value may have several meanings:
    /// * `Ok(len)` means the stream has been fully decoded and the number of bytes output into the destination buffer is returned.
    /// * [`DecompressError::ShortData`] means the input stream has been fully read but more data is needed.
    /// * [`DecompressError::OutputFull`] means the output buffer is full and should be flushed. Then decoding should continue on the same input block with `continue_block` parameter set to `true`.
    ///
    /// [`DecompressError::ShortData`]: ../enum.DecompressError.html#variant.ShortData
    /// [`DecompressError::OutputFull`]: ../enum.DecompressError.html#variant.OutputFull
    pub fn decompress_data(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool) -> DecompressResult<usize> {
        self.decompress_data_internal(src, dst, continue_block, false)
    }
    /// Tries to decompress the whole input chunk to the output buffer,
    /// returning once the chunk is exhausted instead of reporting `ShortData`.
    pub fn decompress_block(&mut self, src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
        self.decompress_data_internal(src, dst, false, true)
    }
386 #[allow(clippy::comparison_chain)]
387 fn decompress_data_internal(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool, do_one_block: bool) -> DecompressResult<usize> {
388 if src.is_empty() || dst.is_empty() {
389 return Err(DecompressError::InvalidArgument);
390 }
391 let mut csrc = if !continue_block {
392 CurrentSource::new(src, self.br)
393 } else {
394 self.output_idx = 0;
395 CurrentSource::reinit(src, self.br)
396 };
397 if do_one_block {
398 self.output_idx = 0;
399 }
400 // check for zlib stream header
401 if let (&InflateState::Start, true) = (&self.state, src.len() > 2) {
402 let cm = src[0] & 0xF;
403 let cinfo = src[0] >> 4;
404 let hdr = (u16::from(src[0]) << 8) | u16::from(src[1]);
405 if cm == 8 && cinfo <= 7 && (hdr % 31) == 0 {
406 csrc.skip(16).unwrap();
407 }
408 }
409 'main: loop {
410 match self.state {
411 InflateState::Start | InflateState::BlockStart => {
412 if csrc.left() == 0 {
413 if do_one_block {
414 return Ok(self.output_idx);
415 }
416 self.br = csrc.br;
417 return Err(DecompressError::ShortData);
418 }
419 self.final_block = csrc.read_bool().unwrap();
420 self.state = InflateState::BlockMode;
421 },
422 InflateState::BlockMode => {
423 let bmode = read_bits!(self, csrc, 2);
424 match bmode {
425 0 => {
426 csrc.align();
427 self.state = InflateState::StaticBlockLen;
428 },
429 1 => { self.state = InflateState::FixedBlock; },
430 2 => { self.state = InflateState::DynBlockHlit; },
431 _ => {
432 self.state = InflateState::End;
433 return Err(DecompressError::InvalidHeader);
434 },
435 };
436 },
437 InflateState::StaticBlockLen => {
438 let len = read_bits!(self, csrc, 16);
439 self.state = InflateState::StaticBlockInvLen(len);
440 },
441 InflateState::StaticBlockInvLen(len) => {
442 let inv_len = read_bits!(self, csrc, 16);
443 if (len ^ inv_len) != 0xFFFF {
444 self.state = InflateState::End;
445 return Err(DecompressError::InvalidHeader);
446 }
447 self.state = InflateState::StaticBlockCopy(len as usize);
448 },
449 InflateState::StaticBlockCopy(len) => {
450 for i in 0..len {
451 if csrc.left() < 8 {
452 self.br = csrc.br;
453 self.state = InflateState::StaticBlockCopy(len - i);
454 return Err(DecompressError::ShortData);
455 }
456 let val = csrc.read(8).unwrap() as u8;
457 self.put_literal(val);
458 }
459 self.state = InflateState::BlockStart;
460 }
461 InflateState::FixedBlock => {
462 let val = read_cb!(self, csrc, &self.fix_len_cb);
463 if val < 256 {
464 if self.output_idx >= dst.len() {
465 self.br = csrc.br;
466 self.state = InflateState::FixedBlockLiteral(val as u8);
467 return Err(DecompressError::OutputFull);
468 }
469 self.put_literal(val as u8);
470 dst[self.output_idx] = val as u8;
471 self.output_idx += 1;
472 } else if val == 256 {
473 if self.final_block {
474 self.state = InflateState::End;
475 return Ok(self.output_idx);
476 } else {
477 self.state = InflateState::BlockStart;
478 }
479 } else {
480 let len_idx = (val - 257) as usize;
481 if len_idx >= LENGTH_BASE.len() {
482 self.state = InflateState::End;
483 return Err(DecompressError::InvalidData);
484 }
485 let len_bits = LENGTH_ADD_BITS[len_idx];
486 let add_base = LENGTH_BASE[len_idx] as usize;
487 if len_bits > 0 {
488 self.state = InflateState::FixedBlockLengthExt(add_base, len_bits);
489 } else {
490 self.state = InflateState::FixedBlockDist(add_base);
491 }
492 }
493 },
494 InflateState::FixedBlockLiteral(sym) => {
495 if self.output_idx >= dst.len() {
496 self.br = csrc.br;
497 return Err(DecompressError::OutputFull);
498 }
499 self.put_literal(sym);
500 dst[self.output_idx] = sym;
501 self.output_idx += 1;
502 self.state = InflateState::FixedBlock;
503 },
504 InflateState::FixedBlockLengthExt(base, bits) => {
505 let add = read_bits!(self, csrc, bits) as usize;
506 self.state = InflateState::FixedBlockDist(base + add);
507 },
508 InflateState::FixedBlockDist(length) => {
509 let dist_idx = reverse_bits(read_bits!(self, csrc, 5), 5) as usize;
510 if dist_idx >= DIST_BASE.len() {
511 self.state = InflateState::End;
512 return Err(DecompressError::InvalidData);
513 }
514 let dist_bits = DIST_ADD_BITS[dist_idx];
515 let dist_base = DIST_BASE[dist_idx] as usize;
516 if dist_bits == 0 {
517 self.state = InflateState::FixedBlockCopy(length, dist_base);
518 } else {
519 self.state = InflateState::FixedBlockDistExt(length, dist_base, dist_bits);
520 }
521 },
522 InflateState::FixedBlockDistExt(length, base, bits) => {
523 let add = read_bits!(self, csrc, bits) as usize;
524 self.state = InflateState::FixedBlockCopy(length, base + add);
525 },
526 InflateState::FixedBlockCopy(length, dist) => {
527 if self.output_idx + length > dst.len() {
528 let copy_size = dst.len() - self.output_idx;
529 let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
530 if ret.is_err() {
531 self.state = InflateState::End;
532 return Err(DecompressError::InvalidData);
533 }
534 self.output_idx += copy_size;
535 self.br = csrc.br;
536 self.state = InflateState::FixedBlockCopy(length - copy_size, dist);
537 return Err(DecompressError::OutputFull);
538 }
539 let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
540 if ret.is_err() {
541 self.state = InflateState::End;
542 return Err(DecompressError::InvalidData);
543 }
544 self.output_idx += length;
545 self.state = InflateState::FixedBlock;
546 }
547 InflateState::DynBlockHlit => {
548 self.hlit = (read_bits!(self, csrc, 5) as usize) + 257;
549 if self.hlit >= 287 {
550 self.state = InflateState::End;
551 return Err(DecompressError::InvalidHeader);
552 }
553 self.state = InflateState::DynBlockHdist;
554 }
555 InflateState::DynBlockHdist => {
556 self.hdist = (read_bits!(self, csrc, 5) as usize) + 1;
557 self.state = InflateState::DynBlockHclen;
558 },
559 InflateState::DynBlockHclen => {
560 let hclen = (read_bits!(self, csrc, 4) as usize) + 4;
561 self.cur_len_idx = 0;
562 self.len_lengths = [0; 19];
563 self.all_lengths = [0; NUM_LITERALS + NUM_DISTS];
564 self.state = InflateState::DynLengths(hclen);
565 },
566 InflateState::DynLengths(len) => {
567 for i in 0..len {
568 if csrc.left() < 3 {
569 self.br = csrc.br;
570 self.state = InflateState::DynLengths(len - i);
571 return Err(DecompressError::ShortData);
572 }
573 self.len_lengths[LEN_RECODE[self.cur_len_idx]] = csrc.read(3).unwrap() as u8;
574 self.cur_len_idx += 1;
575 }
576 let mut len_codes = [ShortCodebookDesc { code: 0, bits: 0 }; 19];
577 lengths_to_codes(&self.len_lengths, &mut len_codes)?;
578 let mut cr = ShortCodebookDescReader::new(len_codes.to_vec());
579 let ret = Codebook::new(&mut cr, CodebookMode::LSB);
580 if ret.is_err() {
581 self.state = InflateState::End;
582 return Err(DecompressError::InvalidHeader);
583 }
584 self.dyn_len_cb = Some(ret.unwrap());
585 self.cur_len_idx = 0;
586 self.state = InflateState::DynCodeLengths;
587 },
588 InflateState::DynCodeLengths => {
589 if let Some(ref len_cb) = self.dyn_len_cb {
590 while self.cur_len_idx < self.hlit + self.hdist {
591 let ret = csrc.read_cb(len_cb);
592 let val = match ret {
593 Ok(val) => val,
594 Err(CodebookError::MemoryError) => {
595 self.br = csrc.br;
596 return Err(DecompressError::ShortData);
597 },
598 Err(_) => {
599 self.state = InflateState::End;
600 return Err(DecompressError::InvalidHeader);
601 },
602 };
603 if val < 16 {
604 self.all_lengths[self.cur_len_idx] = val as u8;
605 self.cur_len_idx += 1;
606 } else {
607 let idx = (val as usize) - 16;
608 if idx > 2 {
609 self.state = InflateState::End;
610 return Err(DecompressError::InvalidHeader);
611 }
612 self.state = InflateState::DynCodeLengthsAdd(idx);
613 continue 'main;
614 }
615 }
616 let (lit_lengths, dist_lengths) = self.all_lengths.split_at(self.hlit);
617
618 let mut lit_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_LITERALS];
619 lengths_to_codes(&lit_lengths, &mut lit_codes)?;
620 let mut cr = ShortCodebookDescReader::new(lit_codes.to_vec());
621 let ret = Codebook::new(&mut cr, CodebookMode::LSB);
622 if ret.is_err() { return Err(DecompressError::InvalidHeader); }
623 self.dyn_lit_cb = Some(ret.unwrap());
624
625 let mut dist_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_DISTS];
626 lengths_to_codes(&dist_lengths[..self.hdist], &mut dist_codes)?;
627 let mut cr = ShortCodebookDescReader::new(dist_codes.to_vec());
628 let ret = Codebook::new(&mut cr, CodebookMode::LSB);
629 if ret.is_err() { return Err(DecompressError::InvalidHeader); }
630 self.dyn_dist_cb = Some(ret.unwrap());
631
632 self.state = InflateState::DynBlock;
633 } else {
634 unreachable!();
635 }
636 },
637 InflateState::DynCodeLengthsAdd(mode) => {
638 let base = REPEAT_BASE[mode] as usize;
639 let bits = REPEAT_BITS[mode];
640 let len = base + read_bits!(self, csrc, bits) as usize;
641 if self.cur_len_idx + len > self.hlit + self.hdist {
642 self.state = InflateState::End;
643 return Err(DecompressError::InvalidHeader);
644 }
645 let rpt = if mode == 0 {
646 if self.cur_len_idx == 0 {
647 self.state = InflateState::End;
648 return Err(DecompressError::InvalidHeader);
649 }
650 self.all_lengths[self.cur_len_idx - 1]
651 } else {
652 0
653 };
654 for _ in 0..len {
655 self.all_lengths[self.cur_len_idx] = rpt;
656 self.cur_len_idx += 1;
657 }
658 self.state = InflateState::DynCodeLengths;
659 },
660 InflateState::DynBlock => {
661 if let Some(ref lit_cb) = self.dyn_lit_cb {
662 let val = read_cb!(self, csrc, lit_cb);
663 if val < 256 {
664 if self.output_idx >= dst.len() {
665 self.br = csrc.br;
666 self.state = InflateState::DynBlockLiteral(val as u8);
667 return Err(DecompressError::OutputFull);
668 }
669 self.put_literal(val as u8);
670 dst[self.output_idx] = val as u8;
671 self.output_idx += 1;
672 } else if val == 256 {
673 if self.final_block {
674 self.state = InflateState::End;
675 return Ok(self.output_idx);
676 } else {
677 self.state = InflateState::BlockStart;
678 }
679 } else {
680 let len_idx = (val - 257) as usize;
681 if len_idx >= LENGTH_BASE.len() {
682 self.state = InflateState::End;
683 return Err(DecompressError::InvalidData);
684 }
685 let len_bits = LENGTH_ADD_BITS[len_idx];
686 let add_base = LENGTH_BASE[len_idx] as usize;
687 if len_bits > 0 {
688 self.state = InflateState::DynBlockLengthExt(add_base, len_bits);
689 } else {
690 self.state = InflateState::DynBlockDist(add_base);
691 }
692 }
693 } else {
694 unreachable!();
695 }
696 },
697 InflateState::DynBlockLiteral(sym) => {
698 if self.output_idx >= dst.len() {
699 self.br = csrc.br;
700 return Err(DecompressError::OutputFull);
701 }
702 self.put_literal(sym);
703 dst[self.output_idx] = sym;
704 self.output_idx += 1;
705 self.state = InflateState::DynBlock;
706 },
707 InflateState::DynBlockLengthExt(base, bits) => {
708 let add = read_bits!(self, csrc, bits) as usize;
709 self.state = InflateState::DynBlockDist(base + add);
710 },
711 InflateState::DynBlockDist(length) => {
712 if let Some(ref dist_cb) = self.dyn_dist_cb {
713 let dist_idx = read_cb!(self, csrc, dist_cb) as usize;
714 if dist_idx >= DIST_BASE.len() {
715 self.state = InflateState::End;
716 return Err(DecompressError::InvalidData);
717 }
718 let dist_bits = DIST_ADD_BITS[dist_idx];
719 let dist_base = DIST_BASE[dist_idx] as usize;
720 if dist_bits == 0 {
721 self.state = InflateState::DynCopy(length, dist_base);
722 } else {
723 self.state = InflateState::DynBlockDistExt(length, dist_base, dist_bits);
724 }
725 } else {
726 unreachable!();
727 }
728 },
729 InflateState::DynBlockDistExt(length, base, bits) => {
730 let add = read_bits!(self, csrc, bits) as usize;
731 self.state = InflateState::DynCopy(length, base + add);
732 },
733 InflateState::DynCopy(length, dist) => {
734 if self.output_idx + length > dst.len() {
735 let copy_size = dst.len() - self.output_idx;
736 let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
737 if ret.is_err() {
738 self.state = InflateState::End;
739 return Err(DecompressError::InvalidData);
740 }
741 self.output_idx += copy_size;
742 self.br = csrc.br;
743 self.state = InflateState::DynCopy(length - copy_size, dist);
744 return Err(DecompressError::OutputFull);
745 }
746 let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
747 if ret.is_err() {
748 self.state = InflateState::End;
749 return Err(DecompressError::InvalidData);
750 }
751 self.output_idx += length;
752 self.state = InflateState::DynBlock;
753 }
754 InflateState::End => {
755 return Ok(0);
756 },
757 }
758 }
759 }
760 ///! Resets decoder state.
761 pub fn reset(&mut self) {
762 self.bpos = 0;
763 self.output_idx = 0;
764 self.full_pos = 0;
765 self.state = InflateState::Start;
766 }
767
    /// Decompresses input data into output returning the uncompressed data length.
    /// Convenience one-shot wrapper: the whole input must fit into `src` and
    /// the whole output must fit into `dst`.
    pub fn uncompress(src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
        let mut inflate = Self::new();
        inflate.decompress_data(src, dst, false)
    }
773 }
774
impl Default for Inflate {
    /// Equivalent to [`Inflate::new`].
    fn default() -> Self {
        Self::new()
    }
}
780
/// Assigns canonical Huffman codes to the given code lengths
/// (the algorithm from RFC 1951, section 3.2.2).
///
/// The generated codes are bit-reversed because the decoder reads LSB-first.
/// Lengths of 32 or more are rejected (deflate itself never exceeds 15).
fn lengths_to_codes(lens: &[u8], codes: &mut [ShortCodebookDesc]) -> DecompressResult<()> {
    let mut bits = [0u32; 32];
    let mut pfx = [0u32; 33];
    // count how many codes of each length there are
    for len in lens.iter() {
        let len = *len as usize;
        if len >= bits.len() {
            return Err(DecompressError::InvalidHeader);
        }
        bits[len] += 1;
    }
    bits[0] = 0; // symbols with zero length get no code at all
    // compute the first code value for every length
    let mut code = 0;
    for i in 0..bits.len() {
        code = (code + bits[i]) << 1;
        pfx[i + 1] = code;
    }

    // hand out consecutive code values within each length, in symbol order
    for (len, codes) in lens.iter().zip(codes.iter_mut()) {
        let len = *len as usize;
        if len != 0 {
            let bits = len as u8;
            *codes = ShortCodebookDesc { code: reverse_bits(pfx[len], bits), bits };
            pfx[len] += 1;
        } else {
            *codes = ShortCodebookDesc { code: 0, bits: 0 };
        }
    }

    Ok(())
}
811
/// CRC-32 checksummer (reflected polynomial 0xEDB88320) for gzip payloads.
struct GzipCRC32 {
    tab: [u32; 256],
    crc: u32,
}

impl GzipCRC32 {
    /// Builds the byte-indexed lookup table and starts with a zero checksum.
    #[allow(clippy::unreadable_literal)]
    fn new() -> Self {
        let mut tab = [0u32; 256];
        for (i, entry) in tab.iter_mut().enumerate() {
            let mut c = i as u32;
            for _ in 0..8 {
                c = if (c & 1) != 0 { 0xEDB88320 ^ (c >> 1) } else { c >> 1 };
            }
            *entry = c;
        }
        Self { tab, crc: 0 }
    }
    /// Folds `src` into the running checksum; may be called repeatedly
    /// to checksum a stream piece by piece.
    fn update_crc(&mut self, src: &[u8]) {
        let mut c = !self.crc;
        for &el in src.iter() {
            c = self.tab[usize::from((c as u8) ^ el)] ^ (c >> 8);
        }
        self.crc = !c;
    }
}
842
/// Decodes input data in gzip file format (RFC 1952) returning a vector containing decoded data.
pub fn gzip_decode(br: &mut ByteReader, skip_crc: bool) -> DecompressResult<Vec<u8>> {
    // gzip FLG header bits (RFC 1952, section 2.3.1)
    const FLAG_HCRC: u8 = 0x02;
    const FLAG_EXTRA: u8 = 0x04;
    const FLAG_NAME: u8 = 0x08;
    const FLAG_COMMENT: u8 = 0x10;

    let id1 = br.read_byte()?;
    let id2 = br.read_byte()?;
    let cm = br.read_byte()?;
    let flg = br.read_byte()?;
    let _mtime = br.read_u32le()?;
    let _xfl = br.read_byte()?;
    let _os = br.read_byte()?;
    // magic bytes 0x1F 0x8B, compression method 8 (deflate)
    if id1 != 0x1F || id2 != 0x8B || cm != 8 {
        return Err(DecompressError::InvalidHeader);
    }

    // optional length-prefixed extra field, skipped
    if (flg & FLAG_EXTRA) != 0 {
        let xlen = br.read_u16le()? as usize;
        br.read_skip(xlen)?;
    }
    // optional NUL-terminated original file name, skipped
    if (flg & FLAG_NAME) != 0 {
        loop {
            let b = br.read_byte()?;
            if b == 0 {
                break;
            }
        }
    }
    // optional NUL-terminated comment, skipped
    if (flg & FLAG_COMMENT) != 0 {
        loop {
            let b = br.read_byte()?;
            if b == 0 {
                break;
            }
        }
    }
    // optional header CRC16, read but not verified
    let _hcrc = if (flg & FLAG_HCRC) != 0 {
        br.read_u16le()?
    } else {
        0
    };
    // reserved flag bits must be zero
    if (flg & 0xE0) != 0 {
        return Err(DecompressError::Unsupported);
    }

    let mut output: Vec<u8> = Vec::new();
    let mut tail = [0u8; 8];
    let mut inblk = [0u8; 1024];
    let mut oblk = [0u8; 4096];
    let mut inflate = Inflate::new();
    let mut checker = GzipCRC32::new();

    // feed the remaining input through the inflater chunk by chunk
    loop {
        let ret = br.read_buf_some(&mut inblk);
        if let Err(ByteIOError::EOF) = ret {
            break;
        }
        let inlen = match ret {
            Ok(val) => val,
            Err(_) => return Err(DecompressError::IOError),
        };
        let mut repeat = false;
        loop {
            let ret = inflate.decompress_data(&inblk[..inlen], &mut oblk, repeat);
            match ret {
                Ok(outlen) => {
                    checker.update_crc(&oblk[..outlen]);
                    output.extend_from_slice(&oblk[..outlen]);
                    break;
                },
                Err(DecompressError::ShortData) => {
                    // this chunk is fully consumed, fetch the next one
                    break;
                },
                Err(DecompressError::OutputFull) => {
                    // output buffer filled up: flush it and continue the same block
                    repeat = true;
                    checker.update_crc(&oblk);
                    output.extend_from_slice(&oblk);
                },
                Err(err) => {
                    return Err(err);
                },
            }
        }
        // Save last 8 bytes for CRC and size.
        if inlen >= 8 {
            tail.copy_from_slice(&inblk[inlen - 8..][..8]);
        } else {
            // short chunk: shift the previous tail down and append the new bytes
            let shift_len = 8 - inlen;
            for i in 0..shift_len {
                tail[i] = tail[i + inlen];
            }
            for i in shift_len..8 {
                tail[i] = inblk[i - shift_len];
            }
        }
    }
    if !skip_crc {
        // NOTE(review): leftover debug print for truncated streams;
        // consider reporting an error instead of printing to stdout
        if !inflate.is_finished() { println!("???"); }
        let crc = read_u32le(&tail[0..4])?;
        let size = read_u32le(&tail[4..8])?;
        // trailer stores CRC32 and uncompressed size (ISIZE) of the payload
        if size != (output.len() as u32) {
            return Err(DecompressError::CRCError);
        }
        if crc != checker.crc {
            return Err(DecompressError::CRCError);
        }
    }

    Ok(output)
}
955
/// One deflate token: either a literal byte or a length/distance match.
#[derive(Clone,Copy,Default)]
struct Token {
    sym: u16,       // literal/length symbol (0..=285)
    distsym: u8,    // distance symbol (0..=29), meaningful for matches only
    len: u16,       // match length (0 for literals)
    dist: u16,      // match distance (0 for literals)
}

/// End-of-block marker (symbol 256).
const TOKEN_EOB: Token = Token { sym: 256, distsym: 0, len: 0, dist: 0 };

impl Token {
    /// Wraps a single literal byte into a token.
    fn from_literal(sym: u8) -> Self {
        Self { sym: u16::from(sym), distsym: 0, dist: 0, len: 0 }
    }
    /// Builds a match token, mapping the length to its RFC 1951 symbol
    /// (257..=285) and the distance to its distance symbol (0..=29).
    fn from_match(dist: u16, len: u16) -> Self {
        let sym = match len {
            3..=10   => 257 + len - 3,
            11..=18  => 265 + (len - 11) / 2,
            19..=34  => 269 + (len - 19) / 4,
            35..=66  => 273 + (len - 35) / 8,
            67..=130 => 277 + (len - 67) / 16,
            131..=257 => 281 + (len - 131) / 32,
            _ => 285,
        };
        let distsym = match dist {
            0..=4 => (dist - 1) as u8,
            _ => {
                // distance symbols above 3 cover power-of-two ranges split in half:
                // the top bit position selects the range, the next bit the half
                let top = 16 - (dist - 1).leading_zeros() as u8;
                let half = (((dist - 1) >> (top - 2)) & 1) as u8;
                top * 2 - 2 + half
            }
        };
        Self { sym, distsym, len, dist }
    }
}
996
/// Run-length encodes a code length array into (code, extra-bits) tokens as
/// required by the dynamic block header (RFC 1951, section 3.2.7) and tallies
/// code usage in `stats`.
///
/// Code 16 repeats the previous length 3-6 times, code 17 repeats zero
/// 3-10 times and code 18 repeats zero 11-138 times; the first occurrence of
/// each new length is always emitted verbatim.
///
/// Fix: the run counter was previously inferred as `u8` and overflowed
/// (panicking in debug builds) on runs longer than 255 equal lengths, which
/// can occur in the combined literal table; it is now a `usize`.
fn add_codes(lens: &[u8], stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
    let mut last = 42; // sentinel that cannot match any valid length (0..=15)
    let mut lcount: usize = 0;

    for &len in lens.iter() {
        if len == last {
            lcount += 1;
        } else {
            flush_run(last, lcount, stats, toks);
            stats[len as usize] += 1;
            toks.push((len, 0));
            last = len;
            lcount = 0;
        }
    }
    flush_run(last, lcount, stats, toks);
}

/// Emits RLE tokens for a pending run of `lcount` repeats of length `last`.
fn flush_run(last: u8, mut lcount: usize, stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
    if lcount == 0 {
        return;
    }
    if last == 0 {
        // long zero runs use code 18 (11-138 repeats)...
        while lcount > 10 {
            let run = lcount.min(138);
            stats[18] += 1;
            toks.push((18, (run - 11) as u8));
            lcount -= run;
        }
        // ...medium ones code 17 (3-10 repeats)...
        if lcount >= 3 {
            stats[17] += 1;
            toks.push((17, (lcount - 3) as u8));
            lcount = 0;
        }
        // ...and leftovers are written out one by one
        for _ in 0..lcount {
            stats[0] += 1;
            toks.push((0, 0));
        }
    } else {
        // non-zero lengths repeat via code 16 (3-6 repeats)
        while lcount >= 3 {
            let run = lcount.min(6);
            stats[16] += 1;
            toks.push((16, (run - 3) as u8));
            lcount -= run;
        }
        for _ in 0..lcount {
            stats[last as usize] += 1;
            toks.push((last, 0));
        }
    }
}
1070
/// Deflate stream writer.
pub struct DeflateWriter {
    dst: Vec<u8>,   // accumulated output bytes
    bits: u8,       // number of pending bits in `bbuf`
    bbuf: u32,      // LSB-first bit cache
}
1077
1078 impl DeflateWriter {
    /// Creates a new instance of `DeflateWriter` for a provided output.
    pub fn new(dst: Vec<u8>) -> Self {
        Self {
            dst,
            bits: 0,
            bbuf: 0,
        }
    }
    /// Pads the pending bit count up to the next byte boundary;
    /// the padding bits are zero since `bbuf` holds no data past `bits`.
    fn align(&mut self) {
        if (self.bits & 7) != 0 {
            self.bits += 8 - (self.bits & 7);
        }
    }
    /// Moves all complete bytes from the bit cache into the output vector.
    fn flush(&mut self) {
        while self.bits >= 8 {
            self.dst.push(self.bbuf as u8);
            self.bbuf >>= 8;
            self.bits -= 8;
        }
    }
    /// Appends `len` bits of `val`, LSB-first; `val` must fit into `len` bits.
    /// `flush()` leaves at most 7 bits cached, so 7 + 16 new bits always fit.
    fn write(&mut self, val: u16, len: u8) {
        self.flush();
        self.bbuf |= u32::from(val) << self.bits;
        self.bits += len;
    }
    /// Finishes writing the stream and returns the output vector.
    pub fn end(mut self) -> Vec<u8> {
        self.flush();
        if self.bits > 0 {
            self.dst.push(self.bbuf as u8);
        }
        self.dst
    }
1112
1113 fn write_codes(&mut self, codes: &CodeHuff, dists: &DistHuff) {
1114 let mut stats = [0u32; 19];
1115 let mut toks = Vec::with_capacity(NUM_LITERALS + NUM_DISTS);
1116 let mut cw = [0u16; 19];
1117 let mut cl = [0u8; 19];
1118 let mut nc = 0;
1119
1120 add_codes(&codes.lens[..codes.num_codes], &mut stats, &mut toks);
1121 add_codes(&dists.lens[..dists.num_codes], &mut stats, &mut toks);
1122
1123 gen_tree(&mut cw, &mut cl, &mut nc, &mut stats, 7);
1124
1125 nc = cw.len();
1126 for &idx in LEN_RECODE.iter().rev() {
1127 if cl[idx] == 0 {
1128 nc -= 1;
1129 } else {
1130 break;
1131 }
1132 }
1133 if nc < 4 {
1134 nc = 4;
1135 }
1136 self.write((nc - 4) as u16, 4);
1137 for &idx in LEN_RECODE.iter().take(nc) {
1138 self.write(u16::from(cl[idx]), 3);
1139 }
1140 for &(sym, add) in toks.iter() {
1141 self.write(cw[sym as usize], cl[sym as usize]);
1142 match sym {
1143 16 => self.write(u16::from(add), 2),
1144 17 => self.write(u16::from(add), 3),
1145 18 => self.write(u16::from(add), 7),
1146 _ => {},
1147 };
1148 }
1149 }
1150 fn write_tokens(&mut self, src: &[Token], codes: &CodeHuff, dists: &DistHuff) {
1151 for &tok in src.iter() {
1152 self.write(codes.codes[tok.sym as usize], codes.lens[tok.sym as usize]);
1153 if tok.sym > 256 {
1154 self.write_len_bits(tok.len);
1155 self.write(dists.codes[tok.distsym as usize], dists.lens[tok.distsym as usize]);
1156 self.write_dist_bits(tok.dist);
1157 }
1158 }
1159 }
1160 fn write_len_bits(&mut self, len: u16) {
1161 let llen = len - 3;
1162 if llen >= 8 && llen < 255 {
1163 let bits = (16 - llen.leading_zeros() - 3) as u8;
1164 self.write(llen & ((1 << bits) - 1), bits);
1165 }
1166 }
1167 fn write_dist_bits(&mut self, dist: u16) {
1168 let ddist = dist - 1;
1169 if dist >= 4 {
1170 let bits = (16 - ddist.leading_zeros() - 2) as u8;
1171 self.write(ddist & ((1 << bits) - 1), bits);
1172 }
1173 }
1174 }
1175
// Literal/length Huffman code set for one deflate block.
struct CodeHuff {
    is_fixed: bool,             // use the predefined static codes instead of building a tree
    stats: [u32; NUM_LITERALS], // symbol frequencies gathered from the token stream
    codes: [u16; NUM_LITERALS], // canonical codes, bit-reversed for LSB-first output
    lens: [u8; NUM_LITERALS],   // code length per symbol (0 = unused)
    num_codes: usize,           // number of transmitted code lengths (HLIT)
}
1183
impl CodeHuff {
    fn new(is_fixed: bool) -> Self {
        Self {
            is_fixed,
            stats: [0; NUM_LITERALS],
            codes: [0; NUM_LITERALS],
            lens: [0; NUM_LITERALS],
            num_codes: NUM_LITERALS,
        }
    }
    // Fills `codes`/`lens` either with the static literal/length codes from
    // RFC 1951 section 3.2.6 or with a Huffman code built from `src` statistics.
    fn make_codes(&mut self, src: &[Token]) {
        if self.is_fixed {
            // 0..=143   -> 8-bit codes 0x30..=0xBF
            for i in 0..=143 {
                self.codes[i] = reverse_bits((i + 0x30) as u32, 8) as u16;
                self.lens[i] = 8;
            }
            // 144..=255 -> 9-bit codes 0x190..=0x1FF
            for i in 144..=255 {
                self.codes[i] = reverse_bits((i + 0x100) as u32, 9) as u16;
                self.lens[i] = 9;
            }
            // 256..=279 -> 7-bit codes 0x00..=0x17
            for i in 256..=279 {
                self.codes[i] = reverse_bits((i & 0x1F) as u32, 7) as u16;
                self.lens[i] = 7;
            }
            // 280..     -> 8-bit codes starting at 0xC0
            for i in 280..NUM_LITERALS {
                self.codes[i] = reverse_bits((i - 280 + 0xC0) as u32, 8) as u16;
                self.lens[i] = 8;
            }
        } else {
            for &tok in src.iter() {
                self.stats[tok.sym as usize] += 1;
            }
            gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
            // HLIT cannot describe fewer than 257 literal/length codes
            if self.num_codes < 257 {
                self.num_codes = 257;
            }
        }
    }
}
1223
// Distance Huffman code set for one deflate block.
struct DistHuff {
    is_fixed: bool,          // use the predefined static 5-bit codes
    stats: [u32; NUM_DISTS], // distance-symbol frequencies from match tokens
    codes: [u16; NUM_DISTS], // canonical codes, bit-reversed for LSB-first output
    lens: [u8; NUM_DISTS],   // code length per symbol (0 = unused)
    num_codes: usize,        // number of transmitted code lengths (HDIST)
}
1231
1232 impl DistHuff {
1233 fn new(is_fixed: bool) -> Self {
1234 Self {
1235 is_fixed,
1236 stats: [0; NUM_DISTS],
1237 codes: [0; NUM_DISTS],
1238 lens: [0; NUM_DISTS],
1239 num_codes: NUM_DISTS,
1240 }
1241 }
1242 fn make_codes(&mut self, src: &[Token]) {
1243 if self.is_fixed {
1244 for i in 0..NUM_DISTS {
1245 self.codes[i] = reverse_bits(i as u32, 5) as u16;
1246 self.lens[i] = 5;
1247 }
1248 } else {
1249 for &tok in src.iter() {
1250 if tok.sym > 256 {
1251 self.stats[tok.distsym as usize] += 1;
1252 }
1253 }
1254 gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
1255 if self.num_codes < 1 {
1256 self.num_codes = 1;
1257 }
1258 }
1259 }
1260 }
1261
// A node of the Huffman construction tree: a leaf (sym != NODE_SYM) carries a
// weight in `w` which is later replaced by its code length; an internal node
// (sym == NODE_SYM) references its children by index via `idx0`/`idx1`.
#[derive(Clone,Copy,Default)]
struct Node {
    sym: u16,
    w: u16,
    idx0: u16,
    idx1: u16,
}

// Marker symbol value identifying internal (non-leaf) nodes.
const NODE_SYM: u16 = 65500;
1271
// Flat-array workspace for Huffman tree construction; `nodes[..nnodes]` is
// kept ordered by weight while the tree is being built.
struct Tree {
    nodes: [Node; NUM_LITERALS * 2],
    nnodes: usize,
}
1276
impl Tree {
    fn new() -> Self {
        Self {
            nodes: [Node::default(); NUM_LITERALS * 2],
            nnodes: 0,
        }
    }
    // Inserts `val` keeping the used part of `nodes` ordered by weight
    // (ascending); entries of equal weight stay before the new one.
    fn insert(&mut self, val: Node) {
        let mut idx = self.nnodes;
        for (i, nn) in self.nodes[..self.nnodes].iter().enumerate() {
            if nn.w > val.w {
                idx = i;
                break;
            }
        }
        if idx < self.nnodes {
            // shift the heavier tail right to make room
            for i in (idx..self.nnodes).rev() {
                self.nodes[i + 1] = self.nodes[i];
            }
        }
        self.nodes[idx] = val;
        self.nnodes += 1;
    }
    // Drops the leading zero-weight entries (symbols that never occurred).
    fn trim(&mut self) {
        let mut start = self.nnodes;
        for (i, n) in self.nodes[..self.nnodes].iter().enumerate() {
            if n.w != 0 {
                start = i;
                break;
            }
        }
        if start != 0 {
            for i in 0..self.nnodes - start {
                self.nodes[i] = self.nodes[i + start];
            }
            self.nnodes -= start;
        }
    }
    // Builds the Huffman tree in place: repeatedly merges the two
    // lowest-weight pending nodes (their weights are zeroed to mark them as
    // consumed) and inserts the parent, then converts leaf weights into code
    // lengths via `assign_len`.
    fn build(&mut self) {
        if self.nnodes == 1 {
            // a single symbol still needs a 1-bit code
            self.nodes[0].w = 1;
            return;
        }
        let mut start = 0;
        while start + 1 < self.nnodes {
            let nn1 = self.nodes[start];
            let nn2 = self.nodes[start + 1];
            let n = Node {
                sym: NODE_SYM,
                w: nn1.w + nn2.w,
                idx0: start as u16,
                idx1: (start + 1) as u16,
            };
            self.nodes[start].w = 0;
            self.nodes[start + 1].w = 0;
            start += 2;
            self.insert(n);
        }
        if self.nnodes > 1 {
            // the last remaining node is the root
            self.assign_len(self.nnodes - 1, 0);
        }
    }
    // Recursively stores each leaf's depth (its code length) in its `w` field.
    fn assign_len(&mut self, idx: usize, len: u16) {
        if self.nodes[idx].sym == NODE_SYM {
            self.assign_len(self.nodes[idx].idx0 as usize, len + 1);
            self.assign_len(self.nodes[idx].idx1 as usize, len + 1);
        } else {
            self.nodes[idx].w = len;
        }
    }
}
1348
1349 fn gen_tree(codes: &mut [u16], lens: &mut [u8], num_codes: &mut usize, stats: &mut [u32], max_bits: u8) {
1350 let mut tot_w = 0;
1351 for &w in stats.iter() {
1352 tot_w += w;
1353 }
1354 if tot_w == 0 {
1355 codes[0] = 0;
1356 lens[0] = 0;
1357 *num_codes = 0;
1358 return;
1359 }
1360 while tot_w > (1 << max_bits) {
1361 for w in stats.iter_mut() {
1362 *w = (*w + 1) >> 1;
1363 }
1364 tot_w = 0;
1365 for &w in stats.iter() {
1366 tot_w += w;
1367 }
1368 }
1369 let mut tree = Tree::new();
1370 for (sym, &w) in stats.iter().enumerate() {
1371 tree.insert(Node{ sym: sym as u16, w: w as u16, idx0: 64000, idx1: 64000 });
1372 }
1373 tree.trim();
1374 tree.build();
1375
1376 for n in tree.nodes[..tree.nnodes].iter() {
1377 if n.sym != NODE_SYM {
1378 lens[n.sym as usize] = n.w as u8;
1379 }
1380 }
1381 lengths_to_codes16(lens, codes);
1382 let mut sz = codes.len();
1383 for &len in lens.iter().rev() {
1384 if len != 0 {
1385 break;
1386 }
1387 sz -= 1;
1388 }
1389 *num_codes = sz;
1390 }
1391
1392 fn lengths_to_codes16(lens: &[u8], codes: &mut [u16]) {
1393 let mut bits = [0u32; 32];
1394 let mut pfx = [0u32; 33];
1395 for len in lens.iter() {
1396 let len = *len as usize;
1397 bits[len] += 1;
1398 }
1399 bits[0] = 0;
1400 let mut code = 0;
1401 for i in 0..bits.len() {
1402 code = (code + bits[i]) << 1;
1403 pfx[i + 1] = code;
1404 }
1405
1406 for (len, codes) in lens.iter().zip(codes.iter_mut()) {
1407 let len = *len as usize;
1408 if len != 0 {
1409 let bits = len as u8;
1410 *codes = reverse_bits(pfx[len], bits) as u16;
1411 pfx[len] += 1;
1412 } else {
1413 *codes = 0;
1414 }
1415 }
1416 }
1417
// Interface of the LZ77 parsing stage: turns input bytes into a stream of
// literal/match tokens terminated by `TOKEN_EOB`.
trait LZParse {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>);
}
1421
1422 struct NoParser {}
1423 impl LZParse for NoParser {
1424 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1425 dst.reserve(src.len());
1426 for &b in src.iter() {
1427 dst.push(Token::from_literal(b));
1428 }
1429 dst.push(TOKEN_EOB);
1430 }
1431 }
1432
// Returns the length of the common prefix of the two slices
// (bounded by the shorter of the two).
fn check_match(src1: &[u8], src2: &[u8]) -> u16 {
    src1.iter()
        .zip(src2.iter())
        .take_while(|&(&a, &b)| a == b)
        .fold(0u16, |len, _| len + 1)
}
1443
// Number of hash buckets used by the match finder.
const HASH_SIZE: usize = 4096;
// Longest match length representable in deflate.
const MAX_MATCH_LEN: usize = 258;
// Search window size; kept below 32768 by MAX_MATCH_LEN (presumably so a
// match starting near the window edge can run to full length — TODO confirm).
const WINDOW_SIZE: usize = 32768 - MAX_MATCH_LEN;
// Sentinel chain-end index; also the position threshold for rebasing in `advance`.
const NONEXT: usize = WINDOW_SIZE * 2;
1448
// Hash-chain based LZ77 match finder over a sliding window.
struct MatchFinder<'a> {
    src: &'a [u8],                   // input slice (rebased as the window slides)
    pos: usize,                      // current position inside `src`
    hstart: [usize; HASH_SIZE],      // chain head per 3-byte hash bucket
    hend: [usize; HASH_SIZE],        // chain tail per bucket (for appending)
    hnext: [usize; WINDOW_SIZE * 3], // per-position link to the next chain entry
}
1456
1457 impl<'a> MatchFinder<'a> {
1458 fn new(src: &'a [u8]) -> Self {
1459 let mut obj = Self {
1460 src,
1461 pos: 0,
1462 hstart: [0; HASH_SIZE],
1463 hend: [0; HASH_SIZE],
1464 hnext: [0; WINDOW_SIZE * 3],
1465 };
1466 obj.build_hash();
1467 obj
1468 }
1469 fn hash(src: &[u8]) -> usize {
1470 let _ = src[2];
1471 (((u16::from(src[0]) << 10) ^ (u16::from(src[1]) << 5) ^ u16::from(src[2])) & ((HASH_SIZE as u16) - 1)) as usize
1472 }
1473 fn build_hash(&mut self) {
1474 for el in self.hstart.iter_mut() { *el = NONEXT; }
1475 for el in self.hend.iter_mut() { *el = NONEXT; }
1476 for el in self.hnext.iter_mut() { *el = NONEXT; }
1477 if self.pos + 3 >= self.src.len() {
1478 return;
1479 }
1480 let end = (self.src.len() - 3).min(self.pos + NONEXT);
1481 for i in (self.pos .. end).rev() {
1482 let key = Self::hash(&self.src[i..]);
1483 if self.hstart[key] == NONEXT {
1484 self.hstart[key] = i;
1485 self.hend[key] = i;
1486 self.hnext[key] = NONEXT;
1487 } else {
1488 self.hnext[self.hend[key]] = i;
1489 self.hend[key] = i;
1490 }
1491 }
1492 }
1493 fn find_match(&mut self) -> (u16, u16) {
1494 if self.pos == 0 || self.pos + 3 > self.src.len() {
1495 return (0, 0);
1496 }
1497 let key = Self::hash(&self.src[self.pos..]) as usize;
1498
1499 let mut best_pos = 0;
1500 let mut best_len = 0;
1501 let mut idx = self.hstart[key];
1502 while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
1503 if idx < self.pos {
1504 let cur_len = check_match(&self.src[self.pos..], &self.src[idx..]);
1505 if cur_len > best_len {
1506 best_len = cur_len;
1507 best_pos = self.pos - idx;
1508 if best_len >= (MAX_MATCH_LEN as u16) {
1509 return (best_pos as u16, MAX_MATCH_LEN as u16);
1510 }
1511 }
1512 }
1513 idx = self.hnext[idx];
1514 }
1515 (best_pos as u16, best_len)
1516 }
1517 fn find_all_matches(&mut self, dists: &mut [u16; MAX_MATCH_LEN + 1]) {
1518 if self.pos == 0 || self.pos + 3 > self.src.len() {
1519 return;
1520 }
1521 let key = Self::hash(&self.src[self.pos..]) as usize;
1522 let mut idx = self.hstart[key];
1523 while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
1524 if idx < self.pos {
1525 let cur_len = (check_match(&self.src[self.pos..], &self.src[idx..]) as usize).min(MAX_MATCH_LEN);
1526 if cur_len > 0 && dists[cur_len] == 0 {
1527 dists[cur_len] = (self.pos - idx) as u16;
1528 }
1529 }
1530 idx = self.hnext[idx];
1531 }
1532 }
1533 fn advance(&mut self, num: usize) {
1534 self.pos += num;
1535
1536 if self.pos >= NONEXT {
1537 let (_, tail) = self.src.split_at(self.pos - WINDOW_SIZE);
1538 self.src = tail;
1539 self.pos = WINDOW_SIZE;
1540 self.build_hash();
1541 } else {
1542 for (start, end) in self.hstart.iter_mut().zip(self.hend.iter_mut()) {
1543 let mut idx = *start;
1544 while idx != NONEXT && idx + WINDOW_SIZE < self.pos {
1545 idx = self.hnext[idx];
1546 *start = idx;
1547 }
1548 if idx == NONEXT {
1549 *end = NONEXT;
1550 }
1551 }
1552 }
1553 }
1554 fn get_sym(&self) -> u8 { self.src[self.pos] }
1555 fn is_end(&self) -> bool { self.pos >= self.src.len() }
1556 }
1557
1558 struct GreedyParser {}
1559 impl LZParse for GreedyParser {
1560 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1561 dst.reserve(src.len());
1562
1563 let mut matcher = MatchFinder::new(src);
1564 while !matcher.is_end() {
1565 let (best_pos, best_len) = matcher.find_match();
1566
1567 if best_len >= 3 {
1568 dst.push(Token::from_match(best_pos, best_len));
1569 matcher.advance(best_len as usize);
1570 } else {
1571 dst.push(Token::from_literal(matcher.get_sym()));
1572 matcher.advance(1);
1573 }
1574 }
1575 dst.push(TOKEN_EOB);
1576 }
1577 }
1578
struct LazyParser {}
// One-step lazy matching: before committing to a match, check whether
// starting one byte later yields a sufficiently longer match.
impl LZParse for LazyParser {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        dst.reserve(src.len());

        let mut matcher = MatchFinder::new(src);
        while !matcher.is_end() {
            let (best_pos, best_len) = matcher.find_match();
            if best_len >= 3 {
                let last_sym = matcher.get_sym();
                matcher.advance(1); // tentatively step over one byte to peek ahead
                if !matcher.is_end() {
                    let (best_pos1, best_len1) = matcher.find_match();
                    if best_len1 > best_len + 1 {
                        // the deferred match is clearly better: emit the
                        // skipped byte as a literal and take the later match
                        dst.push(Token::from_literal(last_sym));
                        dst.push(Token::from_match(best_pos1, best_len1));
                        matcher.advance(best_len1 as usize);
                        continue;
                    }
                }
                // keep the original match; one byte was already consumed above
                dst.push(Token::from_match(best_pos, best_len));
                matcher.advance((best_len - 1) as usize);
            } else {
                dst.push(Token::from_literal(matcher.get_sym()));
                matcher.advance(1);
            }
        }
        dst.push(TOKEN_EOB);
    }
}
1609
// Trellis node for optimal parsing: the cheapest known bit price to reach
// this position, the match distance recorded at this position and a link to
// the neighbouring position on the best path.
#[derive(Clone,Copy)]
struct TNode {
    price: u32,
    dist: u16,
    link: usize,
}

impl Default for TNode {
    fn default() -> Self {
        Self {
            price: std::u32::MAX, // "not reached yet"
            dist: 0,
            link: 0,
        }
    }
}
1626
// Trellis-based parser: searches for the token sequence with the lowest
// estimated bit cost instead of deciding greedily.
struct OptimalParser {
    trellis: Vec<TNode>,
}
impl OptimalParser {
    fn new() -> Self { Self::default() }
    // Estimated cost (in bits) of coding a literal — a rough constant.
    fn sym_price(_sym: u8) -> u32 { 9 }
    // Estimated cost of coding a match with the given distance; the length
    // part is approximated by the same constant as a literal.
    fn match_price(dist: u16, _len: u16) -> u32 {
        if dist <= 4 {
            9 + 5
        } else {
            let bits = 16 - (dist - 1).leading_zeros();
            9 + 3 + bits
        }
    }
}
impl Default for OptimalParser {
    fn default() -> Self {
        Self {
            trellis: Vec::with_capacity(WINDOW_SIZE),
        }
    }
}
impl LZParse for OptimalParser {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        if src.is_empty() {
            dst.push(TOKEN_EOB);
            return;
        }
        dst.reserve(src.len());

        // trellis[i] holds the cheapest known way to encode src[..i]
        self.trellis.truncate(0);
        self.trellis.reserve(src.len() + 1);
        for _ in 0..=src.len() {
            self.trellis.push(TNode::default());
        }
        self.trellis[0].price = 0;

        // forward pass: relax the literal edge and every match edge out of i
        let mut matcher = MatchFinder::new(src);
        for i in 0..self.trellis.len() - 1 {
            let mut dists = [0; MAX_MATCH_LEN + 1];
            matcher.find_all_matches(&mut dists);

            let sym = matcher.get_sym();
            let lprice = Self::sym_price(sym) + self.trellis[i].price;
            if self.trellis[i + 1].price > lprice {
                self.trellis[i + 1].price = lprice;
                self.trellis[i + 1].link = i;
            }
            for (len, &dist) in dists.iter().enumerate() {
                if dist != 0 {
                    let mprice = Self::match_price(dist, len as u16) + self.trellis[i].price;
                    if self.trellis[i + len].price > mprice {
                        self.trellis[i + len].price = mprice;
                        self.trellis[i + len].link = i;
                        // NOTE(review): `dist` stored here may belong to a longer
                        // candidate than the length finally chosen from `i`;
                        // that still yields a valid match, since a shorter
                        // length is a prefix of the longer match at that distance
                        self.trellis[i].dist = dist;
                    }
                }
            }

            matcher.advance(1);
        }
        // reverse the backward `link` chain so it points forward from 0
        let mut idx = self.trellis.len() - 1;
        let mut nidx = self.trellis[idx].link;
        while idx > 0 {
            let oidx = idx;
            idx = nidx;
            nidx = self.trellis[idx].link;
            self.trellis[idx].link = oidx;
        }

        // emit tokens along the cheapest path; a step of 1 is a literal
        let mut idx = 0;
        while idx < self.trellis.len() - 1 {
            let len = self.trellis[idx].link - idx;
            if len == 1 {
                dst.push(Token::from_literal(src[idx]));
            } else {
                dst.push(Token::from_match(self.trellis[idx].dist, len as u16));
            }
            idx = self.trellis[idx].link;
        }

        dst.push(TOKEN_EOB);
    }
}
1711
/// Deflate compression mode.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum DeflateMode {
    /// No compression.
    NoCompr,
    /// Fast compression.
    Fast,
    /// Still fast but better compression.
    Better,
    /// Slow but the best compression.
    Best,
}

impl Default for DeflateMode {
    fn default() -> Self { DeflateMode::Better }
}
1728
/// Description of the deflate compression level option.
pub const DEFLATE_MODE_DESCRIPTION: &str = "Deflate compression level.";
/// Deflate option for no compression.
pub const DEFLATE_MODE_NONE: &str = "none";
/// Deflate option for fast compression.
pub const DEFLATE_MODE_FAST: &str = "fast";
/// Deflate option for better compression.
pub const DEFLATE_MODE_BETTER: &str = "better";
/// Deflate option for best compression.
pub const DEFLATE_MODE_BEST: &str = "best";

/// All possible option values for deflate compression.
pub const DEFLATE_OPTION_VALUES: NAOptionDefinitionType = NAOptionDefinitionType::String(Some(&[DEFLATE_MODE_NONE, DEFLATE_MODE_FAST, DEFLATE_MODE_BETTER, DEFLATE_MODE_BEST]));
1741
1742 impl std::str::FromStr for DeflateMode {
1743 type Err = ();
1744
1745 fn from_str(s: &str) -> Result<Self, Self::Err> {
1746 match s {
1747 DEFLATE_MODE_NONE => Ok(DeflateMode::NoCompr),
1748 DEFLATE_MODE_FAST => Ok(DeflateMode::Fast),
1749 DEFLATE_MODE_BETTER => Ok(DeflateMode::Better),
1750 DEFLATE_MODE_BEST => Ok(DeflateMode::Best),
1751 _ => Err(()),
1752 }
1753 }
1754 }
1755
1756 impl ToString for DeflateMode {
1757 fn to_string(&self) -> String {
1758 match *self {
1759 DeflateMode::NoCompr => DEFLATE_MODE_NONE.to_string(),
1760 DeflateMode::Fast => DEFLATE_MODE_FAST.to_string(),
1761 DeflateMode::Better => DEFLATE_MODE_BETTER.to_string(),
1762 DeflateMode::Best => DEFLATE_MODE_BEST.to_string(),
1763 }
1764 }
1765 }
1766
// Internal block-coding strategy corresponding to the public `DeflateMode`.
#[derive(Clone,Copy,Debug,PartialEq)]
enum Mode {
    Copy,    // stored (uncompressed) blocks
    Fixed,   // static Huffman tables
    Dynamic, // per-block custom Huffman tables
}

// Maximum amount of data compressed as one block
// (fits the 16-bit length field of a stored block).
const MAX_BLOCK_SIZE: usize = 65535;
1775
/// Deflate stream compressor.
pub struct Deflate {
    mode: Mode,                      // block coding strategy
    tokens: Vec<Token>,              // LZ77 tokens of the current block
    srcbuf: [u8; MAX_BLOCK_SIZE],    // input accumulated for the current block
    ssize: usize,                    // amount of data in `srcbuf`
    sum1: u32,                       // Adler-32 low word (for the zlib trailer)
    sum2: u32,                       // Adler-32 high word
    zlib_mode: bool,                 // emit the zlib checksum at the end
    parser: Box<dyn LZParse + Send>, // LZ77 parsing strategy
}
1787
impl Deflate {
    /// Creates a new instance of `Deflate`.
    pub fn new(mode: DeflateMode) -> Self {
        let (mode, parser) = match mode {
            DeflateMode::NoCompr => (Mode::Copy, Box::new(NoParser{}) as Box<dyn LZParse + Send>),
            DeflateMode::Fast => (Mode::Fixed, Box::new(GreedyParser{}) as Box<dyn LZParse + Send>),
            DeflateMode::Better => (Mode::Dynamic, Box::new(LazyParser{}) as Box<dyn LZParse + Send>),
            DeflateMode::Best => (Mode::Dynamic, Box::new(OptimalParser::new()) as Box<dyn LZParse + Send>),
        };
        Self {
            mode, parser,
            tokens: Vec::with_capacity(MAX_BLOCK_SIZE),
            srcbuf: [0; MAX_BLOCK_SIZE],
            ssize: 0,
            sum1: 1, // Adler-32 starts at 1
            sum2: 0,
            zlib_mode: false,
        }
    }
    /// Writes zlib stream header.
    pub fn write_zlib_header(&mut self, wr: &mut DeflateWriter) {
        // CMF: method 8 (deflate) + CINFO 7 (32K window) -> byte 0x78
        wr.write(8, 4);
        wr.write(7, 4);
        // FLG byte chosen per level so that (CMF * 256 + FLG) % 31 == 0
        let level = match self.mode {
            Mode::Copy => 0x01,
            Mode::Fixed => 0x5E,
            Mode::Dynamic => 0x9C,
            // 0xDA for the strongest one
        };
        wr.write(level, 8);
        self.zlib_mode = true;
    }
    // Writes the big-endian Adler-32 checksum that terminates a zlib stream.
    fn write_zlib_footer(&self, wr: &mut DeflateWriter) {
        wr.align();
        wr.write((self.sum2 >> 8) as u16, 8);
        wr.write((self.sum2 & 0xFF) as u16, 8);
        wr.write((self.sum1 >> 8) as u16, 8);
        wr.write((self.sum1 & 0xFF) as u16, 8);
    }
    /// Queues data for compression.
    ///
    /// The data might be not actually compressed until [`compress_end`] is called.
    ///
    /// [`compress_end`]: ./struct.Deflate.html#method.compress_end
    pub fn compress(&mut self, src: &[u8], wr: &mut DeflateWriter) {
        let mut src = src;
        while !src.is_empty() {
            // fill the block buffer and emit a block whenever it becomes full
            let clen = src.len().min(MAX_BLOCK_SIZE - self.ssize);
            let (head, tail) = src.split_at(clen);
            src = tail;
            self.srcbuf[self.ssize..][..clen].copy_from_slice(head);
            self.ssize += clen;
            if self.ssize == MAX_BLOCK_SIZE {
                self.do_block(wr, false);
            }
        }
    }
    /// Tells the encoder to finish data compression.
    ///
    /// Complete data will be output after this call.
    pub fn compress_end(&mut self, wr: &mut DeflateWriter) {
        if self.ssize > 0 {
            self.do_block(wr, true);
        } else {
            // no pending data: emit an empty final fixed-Huffman block
            wr.write(1, 1);
            wr.write(1, 2);
            wr.write(0, 7); //static EOF sym
        }
        if self.zlib_mode {
            self.write_zlib_footer(wr);
        }
    }
    /// Tells the encoder to compress the data it received and flush it.
    pub fn compress_flush(&mut self, wr: &mut DeflateWriter) {
        if self.ssize > 0 {
            self.do_block(wr, false);
        }
        if (wr.bits & 7) != 0 {
            // write zero-length copy block for byte-alignment
            wr.write(0, 1);
            wr.write(0, 2);
            wr.align();
            wr.write(0, 16);
            wr.write(0xFFFF, 16);
        }
    }
    // Compresses and writes out the currently buffered block.
    fn do_block(&mut self, wr: &mut DeflateWriter, final_block: bool) {
        // NOTE: despite the name this is the Adler-32 modulus, not a CRC one
        const CRC_BASE: u32 = 65521;
        // update the running Adler-32; a single conditional subtraction per
        // step suffices since each sum stays below 2 * CRC_BASE
        for &b in self.srcbuf[..self.ssize].iter() {
            self.sum1 += u32::from(b);
            if self.sum1 >= CRC_BASE {
                self.sum1 -= CRC_BASE;
            }
            self.sum2 += self.sum1;
            if self.sum2 >= CRC_BASE {
                self.sum2 -= CRC_BASE;
            }
        }
        match self.mode {
            Mode::Copy => {
                // stored block: LEN, its one's complement, then the raw bytes
                wr.write(final_block as u16, 1);
                wr.write(0, 2);
                wr.align();
                wr.write(self.ssize as u16, 16);
                wr.write(!self.ssize as u16, 16);
                for &b in self.srcbuf[..self.ssize].iter() {
                    wr.write(u16::from(b), 8);
                }
            },
            Mode::Fixed => {
                // block using the predefined Huffman tables
                wr.write(final_block as u16, 1);
                wr.write(1, 2);
                self.tokens.truncate(0);
                self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
                let mut codes = CodeHuff::new(true);
                codes.make_codes(&self.tokens);
                let mut dists = DistHuff::new(true);
                dists.make_codes(&self.tokens);
                wr.write_tokens(&self.tokens, &codes, &dists);
            },
            Mode::Dynamic => {
                // block with custom tables: HLIT and HDIST counts, the table
                // definition and then the token stream
                wr.write(final_block as u16, 1);
                wr.write(2, 2);
                self.tokens.truncate(0);
                self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
                let mut codes = CodeHuff::new(false);
                codes.make_codes(&self.tokens);
                let mut dists = DistHuff::new(false);
                dists.make_codes(&self.tokens);
                wr.write((codes.num_codes - 257) as u16, 5);
                wr.write((dists.num_codes - 1) as u16, 5);
                wr.write_codes(&codes, &dists);
                wr.write_tokens(&self.tokens, &codes, &dists);
            },
        }
        self.ssize = 0;
    }
}
1926
#[cfg(test)]
mod test {
    use super::*;

    // Decompress a complete zlib-wrapped stream into a large-enough buffer.
    #[test]
    fn test_inflate1() {
        const TEST_DATA: &[u8] = &[
            0xF3, 0x48, 0xCD, 0xC9, 0xC9, 0xD7, 0x51, 0x28,
            0xCF, 0x2F, 0xCA, 0x49, 0x51, 0x04, 0x00 ];
        const TEST_REF: &[u8] = b"Hello, world!";
        let mut dst_buf = [0u8; 13];
        let len = Inflate::uncompress(TEST_DATA, &mut dst_buf).unwrap();
        assert_eq!(len, 13);
        for i in 0..len {
            assert_eq!(dst_buf[i], TEST_REF[i]);
        }
    }
    // Decompress in small input chunks into a small output buffer,
    // exercising both the ShortData and OutputFull resumption paths.
    #[test]
    fn test_inflate2() {
        const TEST_DATA3: &[u8] = &[ 0x4B, 0x4C, 0x44, 0x80, 0x24, 0x54, 0x80, 0x2C, 0x06, 0x00 ];
        const TEST_REF3: &[u8] = b"aaaaaaaaaaaabbbbbbbbbbbbbbbaaaaabbbbbbb";
        let mut dst_buf = [0u8; 39];

        let mut inflate = Inflate::new();
        let mut output_chunk = [0u8; 7];
        let mut output_pos = 0;
        for input in TEST_DATA3.chunks(3) {
            let mut repeat = false;
            loop {
                let ret = inflate.decompress_data(input, &mut output_chunk, repeat);
                match ret {
                    Ok(len) => {
                        // chunk decoded fully, collect and move to next input
                        for i in 0..len {
                            dst_buf[output_pos + i] = output_chunk[i];
                        }
                        output_pos += len;
                        break;
                    },
                    Err(DecompressError::ShortData) => {
                        // current input chunk exhausted, feed more data
                        break;
                    },
                    Err(DecompressError::OutputFull) => {
                        // flush the full output chunk and continue decoding
                        repeat = true;
                        for i in 0..output_chunk.len() {
                            dst_buf[output_pos + i] = output_chunk[i];
                        }
                        output_pos += output_chunk.len();
                    },
                    _ => {
                        panic!("decompress error {:?}", ret.err().unwrap());
                    },
                }
            }
        }

        assert_eq!(output_pos, dst_buf.len());
        for i in 0..output_pos {
            assert_eq!(dst_buf[i], TEST_REF3[i]);
        }
    }
    // Decode a complete gzip (RFC 1952) file via gzip_decode.
    #[test]
    fn test_inflate3() {
        const TEST_DATA: &[u8] = &[
            0x1F, 0x8B, 0x08, 0x08, 0xF6, 0x7B, 0x90, 0x5E, 0x02, 0x03, 0x31, 0x2E, 0x74, 0x78, 0x74, 0x00,
            0xE5, 0x95, 0x4B, 0x4E, 0xC3, 0x30, 0x10, 0x40, 0xF7, 0x39, 0xC5, 0x1C, 0x00, 0x16, 0x70, 0x83,
            0x0A, 0xB5, 0x3B, 0xE8, 0x82, 0x5E, 0x60, 0x1A, 0x4F, 0xE2, 0x11, 0xFE, 0x44, 0x1E, 0xA7, 0x69,
            0x6E, 0xCF, 0x38, 0xDD, 0xB0, 0x40, 0xA2, 0x46, 0x2D, 0x20, 0x2A, 0xE5, 0xAB, 0xCC, 0xE7, 0xBD,
            0x49, 0xAC, 0x6C, 0x03, 0x64, 0x4B, 0xD0, 0x71, 0x92, 0x0C, 0x06, 0x67, 0x88, 0x1D, 0x3C, 0xD9,
            0xC4, 0x92, 0x3D, 0x4A, 0xF3, 0x3C, 0x43, 0x4E, 0x23, 0x81, 0x8B, 0x07, 0x82, 0x1E, 0xF5, 0x90,
            0x23, 0x78, 0x6A, 0x56, 0x30, 0x60, 0xCA, 0x89, 0x4D, 0x4F, 0xC0, 0x01, 0x10, 0x06, 0xC2, 0xA4,
            0xA1, 0x44, 0xCD, 0xF6, 0x54, 0x50, 0xA8, 0x8D, 0xC1, 0x9C, 0x5F, 0x71, 0x37, 0x45, 0xC8, 0x63,
            0xCA, 0x8E, 0xC0, 0xE8, 0x23, 0x69, 0x56, 0x9A, 0x8D, 0x5F, 0xB6, 0xC9, 0x96, 0x53, 0x4D, 0x17,
            0xAB, 0xB9, 0xB0, 0x49, 0x14, 0x5A, 0x0B, 0x96, 0x82, 0x7C, 0xB7, 0x6F, 0x17, 0x35, 0xC7, 0x9E,
            0xDF, 0x78, 0xA3, 0xF1, 0xD0, 0xA2, 0x73, 0x1C, 0x7A, 0xD8, 0x2B, 0xB3, 0x5C, 0x90, 0x85, 0xBB,
            0x2A, 0x14, 0x2E, 0xF7, 0xD1, 0x19, 0x48, 0x0A, 0x23, 0x57, 0x45, 0x13, 0x3E, 0xD6, 0xA0, 0xBD,
            0xF2, 0x11, 0x7A, 0x22, 0x21, 0xAD, 0xE5, 0x70, 0x56, 0xA0, 0x9F, 0xA5, 0xA5, 0x03, 0x85, 0x2A,
            0xDE, 0x92, 0x00, 0x32, 0x61, 0x10, 0xAD, 0x27, 0x13, 0x7B, 0x5F, 0x98, 0x7F, 0x59, 0x83, 0xB8,
            0xB7, 0x35, 0x16, 0xEB, 0x12, 0x0F, 0x1E, 0xD9, 0x14, 0x0B, 0xCF, 0xEE, 0x6D, 0x91, 0xF8, 0x93,
            0x6E, 0x81, 0x3F, 0x7F, 0x41, 0xA4, 0x22, 0x1F, 0xB7, 0xE6, 0x85, 0x83, 0x9A, 0xA2, 0x61, 0x12,
            0x0D, 0x0F, 0x6D, 0x01, 0xBD, 0xB0, 0xE8, 0x1D, 0xEC, 0xD1, 0xA0, 0xBF, 0x1F, 0x4E, 0xFB, 0x55,
            0xBD, 0x73, 0xDD, 0x87, 0xB9, 0x53, 0x23, 0x17, 0xD3, 0xE2, 0xE9, 0x08, 0x87, 0x42, 0xFF, 0xCF,
            0x26, 0x42, 0xAE, 0x76, 0xB5, 0xAE, 0x97, 0x0C, 0x18, 0x78, 0xA0, 0x24, 0xE5, 0x54, 0x0C, 0x6E,
            0x60, 0x52, 0x79, 0x22, 0x57, 0xF5, 0x87, 0x78, 0x78, 0x04, 0x93, 0x46, 0xEF, 0xCB, 0x98, 0x96,
            0x8B, 0x65, 0x00, 0xB7, 0x36, 0xBD, 0x77, 0xA8, 0xBD, 0x5A, 0xAA, 0x1A, 0x09, 0x00, 0x00
        ];

        let mut mr = MemoryReader::new_read(TEST_DATA);
        let mut br = ByteReader::new(&mut mr);
        let _dst_buf = gzip_decode(&mut br, false).unwrap();

        // println!("{}", String::from_utf8_lossy(_dst_buf.as_slice()));
    }
    // Check the exact bytes of an uncompressed (stored) zlib stream,
    // including the Adler-32 trailer.
    #[test]
    fn test_deflate_crc() {
        let output = Vec::with_capacity(20);
        let mut writer = DeflateWriter::new(output);
        let mut compr = Deflate::new(DeflateMode::NoCompr);
        compr.write_zlib_header(&mut writer);
        compr.compress(b"Hello, world!", &mut writer);
        compr.compress_end(&mut writer);
        let output = writer.end();
        assert_eq!(output.as_slice(), b"\x78\x01\x01\x0D\x00\xF2\xFFHello, world!\x20\x5E\x04\x8A");
    }
    // Round-trip helper: compress with the given mode and verify that the
    // decompressed result matches the original input.
    fn deflate_test(mode: DeflateMode) {
        const SRC: &[u8] =
b"The first day of Christmas,
My true love sent to me
A partridge in a pear tree.

The second day of Christmas,
My true love sent to me
Two turtle doves, and
A partridge in a pear tree.

The third day of Christmas,
My true love sent to me
Three French hens,
Two turtle doves, and
A partridge in a pear tree.

The fourth day of Christmas,
My true love sent to me
Four colly birds,
Three French hens,
Two turtle doves, and
A partridge in a pear tree.

The fifth day of Christmas,
My true love sent to me
Five gold rings,
Four colly birds,
Three French hens,
Two turtle doves, and
A partridge in a pear tree.";
        let output = Vec::with_capacity(SRC.len() + 16);
        let mut writer = DeflateWriter::new(output);
        let mut compr = Deflate::new(mode);
        compr.write_zlib_header(&mut writer);
        compr.compress(SRC, &mut writer);
        compr.compress_end(&mut writer);
        let output = writer.end();
        let mut uncompr = vec![0u8; SRC.len()];
        Inflate::uncompress(&output, &mut uncompr).unwrap();
        assert_eq!(SRC, uncompr.as_slice());
    }
    #[test]
    fn test_deflate_fast() {
        deflate_test(DeflateMode::Fast);
    }
    #[test]
    fn test_deflate_better() {
        deflate_test(DeflateMode::Better);
    }
    #[test]
    fn test_deflate_best() {
        deflate_test(DeflateMode::Best);
    }
}