deflate: fix align() in inflate
[nihav.git] / nihav-core / src / compr / deflate.rs
1 //! Deflate format (RFC 1951) support.
2 //!
3 //! This module provides functionality for decompressing raw deflated streams via [`Inflate`] and gzip files (RFC 1952) via [`gzip_decode`] and compressing raw or zlib streams via [`Deflate`].
4 //!
5 //! [`Deflate`]: ./struct.Deflate.html
6 //! [`Inflate`]: ./struct.Inflate.html
7 //! [`gzip_decode`]: ./fn.gzip_decode.html
8 //!
9 //! # Examples
10 //!
11 //! Decompressing full input buffer into sufficiently large output buffer:
12 //! ```
13 //! # use nihav_core::compr::DecompressError;
14 //! use nihav_core::compr::deflate::Inflate;
15 //!
16 //! # fn decompress(input: &[u8]) -> Result<(), DecompressError> {
17 //! # let mut output_buffer = [0u8; 16];
18 //! let output_length = Inflate::uncompress(input, &mut output_buffer)?;
19 //! # Ok(())
20 //! # }
21 //! ```
22 //!
23 //! Decompressing input chunks into portions of output:
24 //! ```
25 //! use nihav_core::compr::DecompressError;
26 //! use nihav_core::compr::deflate::Inflate;
27 //!
28 //! # fn decompress(input_data: &[u8]) -> Result<(), DecompressError> {
29 //! let mut inflate = Inflate::new();
30 //! let mut dst_buf: Vec<u8> = Vec::new();
31 //! let mut output_chunk = [0u8; 1024];
32 //! for src in input_data.chunks(512) {
33 //! let mut repeat = false;
34 //! loop {
35 //! let ret = inflate.decompress_data(src, &mut output_chunk, repeat);
36 //! match ret {
37 //! Ok(len) => { // we got a buffer decoded successfully to the end
38 //! dst_buf.extend_from_slice(&output_chunk[..len]);
39 //! break;
40 //! },
41 //! Err(DecompressError::ShortData) => { // this block of data was fully read
42 //! break;
43 //! },
44 //! Err(DecompressError::OutputFull) => {
45 //! // the output buffer is full, flush it and continue decoding the same block
46 //! repeat = true;
47 //! dst_buf.extend_from_slice(&output_chunk);
48 //! },
49 //! Err(err) => {
50 //! return Err(err);
51 //! },
52 //! }
53 //! }
54 //! }
55 //! # Ok(())
56 //! # }
57 //! ```
58 //!
59 //! Compressing input buffer into zlib stream:
60 //! ```
61 //! use nihav_core::compr::deflate::{Deflate, DeflateMode, DeflateWriter};
62 //!
63 //! # fn compress(input: &[u8]) {
64 //! let output = Vec::with_capacity(input.len() * 65540 / 65535 + 6);
65 //! let mut writer = DeflateWriter::new(output);
66 //! let mut compr = Deflate::new(DeflateMode::Fast);
67 //! compr.write_zlib_header(&mut writer);
68 //! compr.compress(input, &mut writer);
69 //! compr.compress_end(&mut writer);
70 //! let output = writer.end();
71 //! # }
72 //! ```
73
74 use crate::io::byteio::*;
75 use crate::io::bitreader::*;
76 use crate::io::codebook::*;
77 use super::*;
78
// Size of the literal/length code-length table for dynamic blocks; the fixed
// literal/length codebook uses one more entry (see FixedLenCodeReader::len()).
const NUM_LITERALS: usize = 287;
// Number of distance codes (RFC 1951 allows codes 0..29 plus two reserved).
const NUM_DISTS:    usize = 32;
81
82 struct FixedLenCodeReader {}
83
84 impl CodebookDescReader<u16> for FixedLenCodeReader {
85 fn bits(&mut self, idx: usize) -> u8 {
86 if idx < 144 { 8 }
87 else if idx < 256 { 9 }
88 else if idx < 280 { 7 }
89 else { 8 }
90 }
91 #[allow(clippy::identity_op)]
92 fn code(&mut self, idx: usize) -> u32 {
93 let base = idx as u32;
94 let bits = self.bits(idx);
95 if idx < 144 { reverse_bits(base + 0x30, bits) }
96 else if idx < 256 { reverse_bits(base + 0x190 - 144, bits) }
97 else if idx < 280 { reverse_bits(base + 0x000 - 256, bits) }
98 else { reverse_bits(base + 0xC0 - 280, bits) }
99 }
100 fn sym (&mut self, idx: usize) -> u16 { idx as u16 }
101 fn len(&mut self) -> usize { NUM_LITERALS + 1 }
102 }
103
// Persisted LSB-first bit reader state, saved between decompress_data() calls
// so decoding can resume across input chunk boundaries.
#[derive(Clone,Copy,Default)]
struct BitReaderState {
    pos:    usize, // next byte to fetch from the current source slice
    bitbuf: u32,   // bit cache, least significant bit is consumed first
    bits:   u8,    // number of valid bits currently in `bitbuf`
}

// A bit reader bound to one input chunk, built from a saved BitReaderState.
struct CurrentSource<'a> {
    src: &'a [u8],
    br:  BitReaderState,
}
115
impl<'a> CurrentSource<'a> {
    // Starts reading a fresh input chunk: cached bits from `br` are kept but
    // the byte position is reset to the start of the new slice.
    fn new(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
        newsrc.br.pos = 0;
        newsrc.refill();
        newsrc
    }
    // Re-enters the same chunk after an interruption (e.g. the output buffer
    // filled up); both the cached bits and the byte position are preserved.
    fn reinit(src: &'a [u8], br: BitReaderState) -> Self {
        let mut newsrc = Self { src, br };
        newsrc.refill();
        newsrc
    }
    // Tops up the 32-bit cache byte by byte, new bits above the existing ones.
    fn refill(&mut self) {
        while (self.br.bits <= 24) && (self.br.pos < self.src.len()) {
            self.br.bitbuf |= u32::from(self.src[self.br.pos]) << self.br.bits;
            self.br.bits += 8;
            self.br.pos += 1;
        }
    }
    // Discards `nbits` bits that are already cached; caller must guarantee
    // `nbits <= self.br.bits`.
    fn skip_cache(&mut self, nbits: u8) {
        self.br.bitbuf >>= nbits;
        self.br.bits -= nbits;
    }
    // Reads up to 16 bits, LSB-first.
    fn read(&mut self, nbits: u8) -> BitReaderResult<u32> {
        if nbits == 0 { return Ok(0); }
        if nbits > 16 { return Err(BitReaderError::TooManyBitsRequested); }
        if self.br.bits < nbits {
            self.refill();
            if self.br.bits < nbits { return Err(BitReaderError::BitstreamEnd); }
        }
        let ret = self.br.bitbuf & ((1 << nbits) - 1);
        self.skip_cache(nbits);
        Ok(ret)
    }
    // Reads a single bit as a boolean.
    fn read_bool(&mut self) -> BitReaderResult<bool> {
        if self.br.bits == 0 {
            self.refill();
            if self.br.bits == 0 { return Err(BitReaderError::BitstreamEnd); }
        }
        let ret = (self.br.bitbuf & 1) != 0;
        self.skip_cache(1);
        Ok(ret)
    }
    // Returns the next `nbits` bits without consuming them. Near the end of
    // the input this may return fewer valid bits than requested.
    fn peek(&mut self, nbits: u8) -> u32 {
        if nbits == 0 || nbits > 16 { return 0; }
        if self.br.bits < nbits {
            self.refill();
        }
        self.br.bitbuf & ((1 << nbits) - 1)
    }
    // Skips bits that are known to be cached already (the read_cb() path
    // checks left() beforehand), hence the unreachable!() on the other branch.
    fn skip(&mut self, nbits: u32) -> BitReaderResult<()> {
        if u32::from(self.br.bits) >= nbits {
            self.skip_cache(nbits as u8);
        } else {
            unreachable!();
        }
        Ok(())
    }
    // Skips to the next byte boundary. Bytes enter the cache whole, so the
    // stream is byte-aligned exactly when `bits` is a multiple of eight;
    // discarding `bits % 8` cached bits therefore restores alignment.
    fn align(&mut self) {
        let b = self.br.bits & 7;
        if b != 0 {
            self.skip_cache(b);
        }
    }
    // Number of bits still available: unread source bytes plus cached bits.
    fn left(&self) -> isize {
        ((self.src.len() as isize) - (self.br.pos as isize)) * 8 + (self.br.bits as isize)
    }
}
184
// Huffman symbol decoding directly from CurrentSource so codebook reads share
// the same resumable bit reader state as plain bit reads.
impl<'a, S: Copy> CodebookReader<S> for CurrentSource<'a> {
    fn read_cb(&mut self, cb: &Codebook<S>) -> CodebookResult<S> {
        let mut esc = true;
        let mut idx = 0;
        let mut lut_bits = cb.lut_bits;
        // Snapshot the reader so a code that straddles the end of the input
        // can be rolled back and retried with the next chunk.
        let orig_br = self.br;
        while esc {
            // Walk the multi-level lookup table: `esc` set means the entry
            // points at a sub-table rather than a final symbol.
            let lut_idx = (self.peek(lut_bits) as usize) + (idx as usize);
            if cb.table[lut_idx] == TABLE_FILL_VALUE { return Err(CodebookError::InvalidCode); }
            let bits = cb.table[lut_idx] & 0x7F;
            esc  = (cb.table[lut_idx] & 0x80) != 0;
            idx  = (cb.table[lut_idx] >> 8) as usize;
            let skip_bits = if esc { u32::from(lut_bits) } else { bits };
            if (skip_bits as isize) > self.left() {
                // Not enough input for the whole code: restore the reader and
                // signal MemoryError, which callers translate into ShortData.
                self.br = orig_br;
                self.refill();
                return Err(CodebookError::MemoryError);
            }
            self.skip(skip_bits as u32).unwrap();
            lut_bits = bits as u8;
        }
        Ok(cb.syms[idx])
    }
}
209
// Decoder state machine. Each state carries the values parsed so far, so that
// decoding can pause (short input / full output) and resume mid-element.
enum InflateState {
    Start,
    BlockStart,
    BlockMode,
    // Stored (uncompressed) block: LEN, its complement NLEN, then raw bytes.
    StaticBlockLen,
    StaticBlockInvLen(u32),
    StaticBlockCopy(usize),
    // Fixed-Huffman block: literal/length symbol, optional length extra bits,
    // distance symbol, optional distance extra bits, then the LZ77 copy.
    FixedBlock,
    FixedBlockLengthExt(usize, u8),
    FixedBlockDist(usize),
    FixedBlockDistExt(usize, usize, u8),
    FixedBlockCopy(usize, usize),
    FixedBlockLiteral(u8),
    // Dynamic-Huffman block header: HLIT, HDIST, HCLEN, code-length code
    // lengths, then the run-length-coded literal/distance code lengths.
    DynBlockHlit,
    DynBlockHdist,
    DynBlockHclen,
    DynLengths(usize),
    DynCodeLengths,
    DynCodeLengthsAdd(usize),
    // Dynamic-Huffman block payload, mirroring the fixed-block states.
    DynBlock,
    DynBlockLengthExt(usize, u8),
    DynBlockDist(usize),
    DynBlockDistExt(usize, usize, u8),
    DynCopy(usize, usize),
    DynBlockLiteral(u8),
    End,
}
237
/// The decompressor for deflated streams (RFC 1951).
pub struct Inflate {
    br:             BitReaderState,     // bit reader state kept between calls
    fix_len_cb:     Codebook<u16>,      // fixed literal/length codebook (block type 1)

    buf:            [u8; 65536],        // 64 KiB LZ77 history window (ring buffer)
    bpos:           usize,              // current write position inside `buf` (wraps)
    output_idx:     usize,              // bytes written to the caller's buffer this run
    full_pos:       usize,              // total bytes decoded, never wrapped

    state:          InflateState,       // resume point of the state machine
    final_block:    bool,               // BFINAL flag of the current block
    hlit:           usize,              // number of literal/length codes (dynamic block)
    hdist:          usize,              // number of distance codes (dynamic block)
    dyn_len_cb:     Option<Codebook<u32>>,  // code-length alphabet codebook
    dyn_lit_cb:     Option<Codebook<u32>>,  // literal/length codebook
    dyn_dist_cb:    Option<Codebook<u32>>,  // distance codebook
    len_lengths:    [u8; 19],           // lengths for the code-length alphabet
    all_lengths:    [u8; NUM_LITERALS + NUM_DISTS], // literal then distance lengths
    cur_len_idx:    usize,              // progress index while reading code lengths
}
259
// Extra-bit counts for length codes 257..=285 (RFC 1951, 3.2.5).
const LENGTH_ADD_BITS: [u8; 29] = [
      0,  0,  0,  0,  0,  0,  0,  0,  1,  1,
      1,  1,  2,  2,  2,  2,  3,  3,  3,  3,
      4,  4,  4,  4,  5,  5,  5,  5,  0
];
// Base match lengths for length codes 257..=285.
const LENGTH_BASE: [u16; 29] = [
      3,   4,   5,   6,   7,   8,   9,  10,  11,  13,
     15,  17,  19,  23,  27,  31,  35,  43,  51,  59,
     67,  83,  99, 115, 131, 163, 195, 227, 258
];
// Extra-bit counts for distance codes 0..=29.
const DIST_ADD_BITS: [u8; 30] = [
     0,  0,  0,  0,  1,  1,  2,  2,  3,  3,
     4,  4,  5,  5,  6,  6,  7,  7,  8,  8,
     9,  9, 10, 10, 11, 11, 12, 12, 13, 13
];
// Base match distances for distance codes 0..=29.
const DIST_BASE: [u16; 30] = [
       1,    2,    3,    4,    5,    7,    9,   13,   17,   25,
      33,   49,   65,   97,  129,  193,  257,  385,  513,  769,
    1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577
];
// Transmission order of the code-length code lengths (HCLEN list order).
const LEN_RECODE: [usize; 19] = [
    16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15
];
// Extra bits / base repeat counts for the run-length codes 16, 17 and 18.
const REPEAT_BITS: [u8; 3] = [ 2, 3, 7 ];
const REPEAT_BASE: [u8; 3] = [ 3, 3, 11 ];
285
// Reads `$bits` bits from `$csrc`, or — if the input is exhausted — saves the
// bit reader state back into `$self` and bails out with ShortData so that
// decoding can resume from the same spot with the next input chunk.
macro_rules! read_bits {
    ($self: expr, $csrc: expr, $bits: expr) => ({
        if $csrc.left() < $bits as isize {
            $self.br = $csrc.br;
            return Err(DecompressError::ShortData);
        }
        $csrc.read($bits).unwrap()
    })
}
295
// Reads one Huffman symbol from `$cb`. MemoryError (code crosses the end of
// the input) becomes a resumable ShortData; any other codebook error is fatal
// and moves the decoder to the End state.
macro_rules! read_cb {
    ($self: expr, $csrc: expr, $cb: expr) => ({
        let ret = $csrc.read_cb($cb);
        if let Err(CodebookError::MemoryError) = ret {
            $self.br = $csrc.br;
            return Err(DecompressError::ShortData);
        }
        match ret {
            Ok(val) => val,
            Err(_)  => {
                $self.state = InflateState::End;
                return Err(DecompressError::InvalidData);
            },
        }
    })
}
312
impl Inflate {
    /// Creates a new instance of `Inflate` struct.
    pub fn new() -> Self {
        let mut cr = FixedLenCodeReader {};
        // The fixed codebook is a compile-time constant table (RFC 1951,
        // 3.2.6), so constructing it cannot fail.
        let fix_len_cb = Codebook::new(&mut cr, CodebookMode::LSB).unwrap();
        Self {
            br:             BitReaderState::default(),
            fix_len_cb,

            buf:            [0; 65536],
            bpos:           0,
            output_idx:     0,
            full_pos:       0,

            state:          InflateState::Start,
            final_block:    false,
            dyn_len_cb:     None,
            dyn_lit_cb:     None,
            dyn_dist_cb:    None,
            hlit:           0,
            hdist:          0,
            len_lengths:    [0; 19],
            all_lengths:    [0; NUM_LITERALS + NUM_DISTS],
            cur_len_idx:    0,
        }
    }
    // Appends one byte to the 64 KiB history window used for back-references.
    fn put_literal(&mut self, val: u8) {
        self.buf[self.bpos] = val;
        // buf.len() is a power of two, so masking implements the ring wrap.
        self.bpos = (self.bpos + 1) & (self.buf.len() - 1);
        self.full_pos += 1;
    }
    // Copies `len` bytes starting `offset` bytes back in the history window,
    // writing them both back into the window and into `dst`.
    fn lz_copy(&mut self, offset: usize, len: usize, dst: &mut [u8]) -> DecompressResult<()> {
        let mask = self.buf.len() - 1;
        // A reference reaching before the start of the stream is invalid.
        if offset > self.full_pos {
            return Err(DecompressError::InvalidData);
        }
        let cstart = (self.bpos.wrapping_sub(offset)) & mask;
        // Byte-by-byte copy is required: when offset < len the match repeats
        // bytes that are being written by this very copy.
        for i in 0..len {
            self.buf[(self.bpos + i) & mask] = self.buf[(cstart + i) & mask];
            dst[i] = self.buf[(cstart + i) & mask];
        }
        self.bpos = (self.bpos + len) & mask;
        self.full_pos += len;
        Ok(())
    }
    /// Reports whether decoder has finished decoding the input.
    pub fn is_finished(&self) -> bool {
        match self.state {
            InflateState::End => true,
            _ => false,
        }
    }
    /// Reports the current amount of bytes output into the destination buffer after the last run.
    pub fn get_current_output_size(&self) -> usize { self.output_idx }
    /// Reports the total amount of bytes decoded so far.
    // NOTE(review): this returns `bpos`, the write position inside the 64 KiB
    // ring buffer, which wraps around; `full_pos` holds the unwrapped total.
    // Confirm which of the two callers actually expect.
    pub fn get_total_output_size(&self) -> usize { self.bpos }
    /// Tries to decompress input data and write it to the output buffer.
    ///
    /// Since the decompressor can work with arbitrary input and output chunks its return value may have several meanings:
    /// * `Ok(len)` means the stream has been fully decoded and then number of bytes output into the destination buffer is returned.
    /// * [`DecompressError::ShortData`] means the input stream has been fully read but more data is needed.
    /// * [`DecompressError::OutputFull`] means the output buffer is full and should be flushed. Then decoding should continue on the same input block with `continue_block` parameter set to `true`.
    ///
    /// [`DecompressError::ShortData`]: ../enum.DecompressError.html#variant.ShortData
    /// [`DecompressError::OutputFull`]: ../enum.DecompressError.html#variant.OutputFull
    #[allow(clippy::comparison_chain)]
    pub fn decompress_data(&mut self, src: &[u8], dst: &mut [u8], continue_block: bool) -> DecompressResult<usize> {
        if src.is_empty() || dst.is_empty() {
            return Err(DecompressError::InvalidArgument);
        }
        // Either start reading a fresh input chunk or re-enter the same chunk
        // after the caller flushed a full output buffer.
        let mut csrc = if !continue_block {
                CurrentSource::new(src, self.br)
            } else {
                self.output_idx = 0;
                CurrentSource::reinit(src, self.br)
            };
        'main: loop {
            match self.state {
                InflateState::Start | InflateState::BlockStart => {
                    if csrc.left() == 0 {
                        self.br = csrc.br;
                        return Err(DecompressError::ShortData);
                    }
                    // BFINAL bit.
                    self.final_block = csrc.read_bool().unwrap();
                    self.state = InflateState::BlockMode;
                },
                InflateState::BlockMode => {
                    // BTYPE: 0 = stored, 1 = fixed Huffman, 2 = dynamic Huffman.
                    let bmode = read_bits!(self, csrc, 2);
                    match bmode {
                        0 => {
                            // Stored blocks start on a byte boundary.
                            csrc.align();
                            self.state = InflateState::StaticBlockLen;
                        },
                        1 => { self.state = InflateState::FixedBlock; },
                        2 => { self.state = InflateState::DynBlockHlit; },
                        _ => {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidHeader);
                        },
                    };
                },
                InflateState::StaticBlockLen => {
                    let len = read_bits!(self, csrc, 16);
                    self.state = InflateState::StaticBlockInvLen(len);
                },
                InflateState::StaticBlockInvLen(len) => {
                    // NLEN must be the one's complement of LEN.
                    let inv_len = read_bits!(self, csrc, 16);
                    if len != !inv_len {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    self.state = InflateState::StaticBlockCopy(len as usize);
                },
                InflateState::StaticBlockCopy(len) => {
                    // NOTE(review): stored-block bytes go only into the history
                    // window via put_literal(); unlike the Huffman paths nothing
                    // is copied into `dst` and `output_idx` does not advance —
                    // confirm whether stored blocks should also emit output here.
                    for i in 0..len {
                        if csrc.left() < 8 {
                            self.br = csrc.br;
                            self.state = InflateState::StaticBlockCopy(len - i);
                            return Err(DecompressError::ShortData);
                        }
                        let val = csrc.read(8).unwrap() as u8;
                        self.put_literal(val);
                    }
                    self.state = InflateState::BlockStart;
                }
                InflateState::FixedBlock => {
                    let val = read_cb!(self, csrc, &self.fix_len_cb);
                    if val < 256 {
                        // Literal byte.
                        if self.output_idx >= dst.len() {
                            self.br = csrc.br;
                            self.state = InflateState::FixedBlockLiteral(val as u8);
                            return Err(DecompressError::OutputFull);
                        }
                        self.put_literal(val as u8);
                        dst[self.output_idx] = val as u8;
                        self.output_idx += 1;
                    } else if val == 256 {
                        // End-of-block symbol.
                        if self.final_block {
                            self.state = InflateState::End;
                            return Ok(self.output_idx);
                        } else {
                            self.state = InflateState::BlockStart;
                        }
                    } else {
                        // Match length symbol 257..=285.
                        let len_idx = (val - 257) as usize;
                        if len_idx >= LENGTH_BASE.len() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        let len_bits = LENGTH_ADD_BITS[len_idx];
                        let add_base = LENGTH_BASE[len_idx] as usize;
                        if len_bits > 0 {
                            self.state = InflateState::FixedBlockLengthExt(add_base, len_bits);
                        } else {
                            self.state = InflateState::FixedBlockDist(add_base);
                        }
                    }
                },
                InflateState::FixedBlockLiteral(sym) => {
                    // Retry a literal that did not fit into `dst` last time.
                    if self.output_idx >= dst.len() {
                        self.br = csrc.br;
                        return Err(DecompressError::OutputFull);
                    }
                    self.put_literal(sym);
                    dst[self.output_idx] = sym;
                    self.output_idx += 1;
                    self.state = InflateState::FixedBlock;
                },
                InflateState::FixedBlockLengthExt(base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::FixedBlockDist(base + add);
                },
                InflateState::FixedBlockDist(length) => {
                    // Fixed distance codes are 5 bits stored MSB-first, hence
                    // the bit reversal after the LSB-first read.
                    let dist_idx = reverse_bits(read_bits!(self, csrc, 5), 5) as usize;
                    if dist_idx >= DIST_BASE.len() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    }
                    let dist_bits = DIST_ADD_BITS[dist_idx];
                    let dist_base = DIST_BASE[dist_idx] as usize;
                    if dist_bits == 0 {
                        self.state = InflateState::FixedBlockCopy(length, dist_base);
                    } else {
                        self.state = InflateState::FixedBlockDistExt(length, dist_base, dist_bits);
                    }
                },
                InflateState::FixedBlockDistExt(length, base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::FixedBlockCopy(length, base + add);
                },
                InflateState::FixedBlockCopy(length, dist) => {
                    if self.output_idx + length > dst.len() {
                        // Copy what fits, remember the remainder and ask the
                        // caller to flush the output buffer.
                        let copy_size = dst.len() - self.output_idx;
                        let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
                        if ret.is_err() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        self.output_idx += copy_size;
                        self.br = csrc.br;
                        self.state = InflateState::FixedBlockCopy(length - copy_size, dist);
                        return Err(DecompressError::OutputFull);
                    }
                    let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
                    if ret.is_err() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    }
                    self.output_idx += length;
                    self.state = InflateState::FixedBlock;
                }
                InflateState::DynBlockHlit => {
                    self.hlit = (read_bits!(self, csrc, 5) as usize) + 257;
                    // RFC 1951 allows at most 286 literal/length codes.
                    if self.hlit >= 287 {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    self.state = InflateState::DynBlockHdist;
                }
                InflateState::DynBlockHdist => {
                    self.hdist = (read_bits!(self, csrc, 5) as usize) + 1;
                    self.state = InflateState::DynBlockHclen;
                },
                InflateState::DynBlockHclen => {
                    let hclen = (read_bits!(self, csrc, 4) as usize) + 4;
                    self.cur_len_idx = 0;
                    self.len_lengths = [0; 19];
                    self.all_lengths = [0; NUM_LITERALS + NUM_DISTS];
                    self.state = InflateState::DynLengths(hclen);
                },
                InflateState::DynLengths(len) => {
                    // Read the 3-bit lengths of the code-length alphabet in
                    // the special LEN_RECODE transmission order.
                    for i in 0..len {
                        if csrc.left() < 3 {
                            self.br = csrc.br;
                            self.state = InflateState::DynLengths(len - i);
                            return Err(DecompressError::ShortData);
                        }
                        self.len_lengths[LEN_RECODE[self.cur_len_idx]] = csrc.read(3).unwrap() as u8;
                        self.cur_len_idx += 1;
                    }
                    let mut len_codes = [ShortCodebookDesc { code: 0, bits: 0 }; 19];
                    lengths_to_codes(&self.len_lengths, &mut len_codes)?;
                    let mut cr = ShortCodebookDescReader::new(len_codes.to_vec());
                    let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                    if ret.is_err() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    self.dyn_len_cb = Some(ret.unwrap());
                    self.cur_len_idx = 0;
                    self.state = InflateState::DynCodeLengths;
                },
                InflateState::DynCodeLengths => {
                    // Decode the run-length-coded literal and distance code
                    // lengths, then build both dynamic codebooks.
                    if let Some(ref len_cb) = self.dyn_len_cb {
                        while self.cur_len_idx < self.hlit + self.hdist {
                            let ret = csrc.read_cb(len_cb);
                            let val = match ret {
                                    Ok(val) => val,
                                    Err(CodebookError::MemoryError) => {
                                        self.br = csrc.br;
                                        return Err(DecompressError::ShortData);
                                    },
                                    Err(_) => {
                                        self.state = InflateState::End;
                                        return Err(DecompressError::InvalidHeader);
                                    },
                                };
                            if val < 16 {
                                self.all_lengths[self.cur_len_idx] = val as u8;
                                self.cur_len_idx += 1;
                            } else {
                                // Codes 16/17/18 repeat a previous length or zeros.
                                let idx = (val as usize) - 16;
                                if idx > 2 {
                                    self.state = InflateState::End;
                                    return Err(DecompressError::InvalidHeader);
                                }
                                self.state = InflateState::DynCodeLengthsAdd(idx);
                                continue 'main;
                            }
                        }
                        let (lit_lengths, dist_lengths) = self.all_lengths.split_at(self.hlit);

                        let mut lit_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_LITERALS];
                        lengths_to_codes(&lit_lengths, &mut lit_codes)?;
                        let mut cr = ShortCodebookDescReader::new(lit_codes.to_vec());
                        let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        if ret.is_err() { return Err(DecompressError::InvalidHeader); }
                        self.dyn_lit_cb = Some(ret.unwrap());

                        let mut dist_codes = [ShortCodebookDesc { code: 0, bits: 0 }; NUM_DISTS];
                        lengths_to_codes(&dist_lengths[..self.hdist], &mut dist_codes)?;
                        let mut cr = ShortCodebookDescReader::new(dist_codes.to_vec());
                        let ret = Codebook::new(&mut cr, CodebookMode::LSB);
                        if ret.is_err() { return Err(DecompressError::InvalidHeader); }
                        self.dyn_dist_cb = Some(ret.unwrap());

                        self.state = InflateState::DynBlock;
                    } else {
                        unreachable!();
                    }
                },
                InflateState::DynCodeLengthsAdd(mode) => {
                    let base = REPEAT_BASE[mode] as usize;
                    let bits = REPEAT_BITS[mode];
                    let len = base + read_bits!(self, csrc, bits) as usize;
                    if self.cur_len_idx + len > self.hlit + self.hdist {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidHeader);
                    }
                    // Mode 0 (code 16) repeats the previous length, which must exist.
                    let rpt = if mode == 0 {
                            if self.cur_len_idx == 0 {
                                self.state = InflateState::End;
                                return Err(DecompressError::InvalidHeader);
                            }
                            self.all_lengths[self.cur_len_idx - 1]
                        } else {
                            0
                        };
                    for _ in 0..len {
                        self.all_lengths[self.cur_len_idx] = rpt;
                        self.cur_len_idx += 1;
                    }
                    self.state = InflateState::DynCodeLengths;
                },
                InflateState::DynBlock => {
                    // Same structure as FixedBlock but with the dynamic codebooks.
                    if let Some(ref lit_cb) = self.dyn_lit_cb {
                        let val = read_cb!(self, csrc, lit_cb);
                        if val < 256 {
                            if self.output_idx >= dst.len() {
                                self.br = csrc.br;
                                self.state = InflateState::DynBlockLiteral(val as u8);
                                return Err(DecompressError::OutputFull);
                            }
                            self.put_literal(val as u8);
                            dst[self.output_idx] = val as u8;
                            self.output_idx += 1;
                        } else if val == 256 {
                            if self.final_block {
                                self.state = InflateState::End;
                                return Ok(self.output_idx);
                            } else {
                                self.state = InflateState::BlockStart;
                            }
                        } else {
                            let len_idx = (val - 257) as usize;
                            if len_idx >= LENGTH_BASE.len() {
                                self.state = InflateState::End;
                                return Err(DecompressError::InvalidData);
                            }
                            let len_bits = LENGTH_ADD_BITS[len_idx];
                            let add_base = LENGTH_BASE[len_idx] as usize;
                            if len_bits > 0 {
                                self.state = InflateState::DynBlockLengthExt(add_base, len_bits);
                            } else {
                                self.state = InflateState::DynBlockDist(add_base);
                            }
                        }
                    } else {
                        unreachable!();
                    }
                },
                InflateState::DynBlockLiteral(sym) => {
                    if self.output_idx >= dst.len() {
                        self.br = csrc.br;
                        return Err(DecompressError::OutputFull);
                    }
                    self.put_literal(sym);
                    dst[self.output_idx] = sym;
                    self.output_idx += 1;
                    self.state = InflateState::DynBlock;
                },
                InflateState::DynBlockLengthExt(base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::DynBlockDist(base + add);
                },
                InflateState::DynBlockDist(length) => {
                    if let Some(ref dist_cb) = self.dyn_dist_cb {
                        let dist_idx = read_cb!(self, csrc, dist_cb) as usize;
                        if dist_idx >= DIST_BASE.len() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        let dist_bits = DIST_ADD_BITS[dist_idx];
                        let dist_base = DIST_BASE[dist_idx] as usize;
                        if dist_bits == 0 {
                            self.state = InflateState::DynCopy(length, dist_base);
                        } else {
                            self.state = InflateState::DynBlockDistExt(length, dist_base, dist_bits);
                        }
                    } else {
                        unreachable!();
                    }
                },
                InflateState::DynBlockDistExt(length, base, bits) => {
                    let add = read_bits!(self, csrc, bits) as usize;
                    self.state = InflateState::DynCopy(length, base + add);
                },
                InflateState::DynCopy(length, dist) => {
                    if self.output_idx + length > dst.len() {
                        let copy_size = dst.len() - self.output_idx;
                        let ret = self.lz_copy(dist, copy_size, &mut dst[self.output_idx..]);
                        if ret.is_err() {
                            self.state = InflateState::End;
                            return Err(DecompressError::InvalidData);
                        }
                        self.output_idx += copy_size;
                        self.br = csrc.br;
                        self.state = InflateState::DynCopy(length - copy_size, dist);
                        return Err(DecompressError::OutputFull);
                    }
                    let ret = self.lz_copy(dist, length, &mut dst[self.output_idx..]);
                    if ret.is_err() {
                        self.state = InflateState::End;
                        return Err(DecompressError::InvalidData);
                    }
                    self.output_idx += length;
                    self.state = InflateState::DynBlock;
                }
                InflateState::End => {
                    return Ok(0);
                },
            }
        }
    }
    /// Decompresses input data into output returning the uncompressed data length.
    pub fn uncompress(src: &[u8], dst: &mut [u8]) -> DecompressResult<usize> {
        let mut inflate = Self::new();
        // Skip a zlib (RFC 1950) header if present. For CMF 0x78 the FCHECK
        // condition (CMF*256 + FLG) % 31 == 0 reduces to FLG ≡ 1 (mod 31)
        // because 0x7800 % 31 == 30, which is what the test below checks.
        let off = if src.len() > 2 && src[0] == 0x78 && (src[1] != 0 && ((src[1] - 1) % 31) == 0) { 2 } else { 0 };
        inflate.decompress_data(&src[off..], dst, false)
    }
}
744
745 impl Default for Inflate {
746 fn default() -> Self {
747 Self::new()
748 }
749 }
750
751 fn lengths_to_codes(lens: &[u8], codes: &mut [ShortCodebookDesc]) -> DecompressResult<()> {
752 let mut bits = [0u32; 32];
753 let mut pfx = [0u32; 33];
754 for len in lens.iter() {
755 let len = *len as usize;
756 if len >= bits.len() {
757 return Err(DecompressError::InvalidHeader);
758 }
759 bits[len] += 1;
760 }
761 bits[0] = 0;
762 let mut code = 0;
763 for i in 0..bits.len() {
764 code = (code + bits[i]) << 1;
765 pfx[i + 1] = code;
766 }
767
768 for (len, codes) in lens.iter().zip(codes.iter_mut()) {
769 let len = *len as usize;
770 if len != 0 {
771 let bits = len as u8;
772 *codes = ShortCodebookDesc { code: reverse_bits(pfx[len], bits), bits };
773 pfx[len] += 1;
774 } else {
775 *codes = ShortCodebookDesc { code: 0, bits: 0 };
776 }
777 }
778
779 Ok(())
780 }
781
// Incremental CRC-32 (IEEE polynomial, as used by gzip) with a runtime-built
// byte lookup table.
struct GzipCRC32 {
    tab: [u32; 256], // per-byte CRC table for the reflected 0xEDB88320 polynomial
    crc: u32,        // running CRC of all data fed so far
}

impl GzipCRC32 {
    #[allow(clippy::unreadable_literal)]
    fn new() -> Self {
        let mut tab = [0u32; 256];
        for (i, entry) in tab.iter_mut().enumerate() {
            let mut c = i as u32;
            for _ in 0..8 {
                c = if (c & 1) != 0 { 0xEDB88320 ^ (c >> 1) } else { c >> 1 };
            }
            *entry = c;
        }
        Self { tab, crc: 0 }
    }
    // Folds `src` into the running checksum; may be called repeatedly.
    fn update_crc(&mut self, src: &[u8]) {
        let mut c = !self.crc;
        for &b in src.iter() {
            c = self.tab[usize::from((c as u8) ^ b)] ^ (c >> 8);
        }
        self.crc = !c;
    }
}
812
/// Decodes input data in gzip file format (RFC 1952) returning a vector containing decoded data.
pub fn gzip_decode(br: &mut ByteReader, skip_crc: bool) -> DecompressResult<Vec<u8>> {
    // Member header flag bits (RFC 1952, section 2.3.1).
    const FLAG_HCRC:    u8 = 0x02;
    const FLAG_EXTRA:   u8 = 0x04;
    const FLAG_NAME:    u8 = 0x08;
    const FLAG_COMMENT: u8 = 0x10;

    // Fixed part of the member header.
    let id1 = br.read_byte()?;
    let id2 = br.read_byte()?;
    let cm = br.read_byte()?;
    let flg = br.read_byte()?;
    let _mtime = br.read_u32le()?;
    let _xfl = br.read_byte()?;
    let _os = br.read_byte()?;
    // Magic 0x1F 0x8B and compression method 8 (deflate) are mandatory.
    if id1 != 0x1F || id2 != 0x8B || cm != 8 {
        return Err(DecompressError::InvalidHeader);
    }

    // Optional header fields, in the order the spec defines them.
    if (flg & FLAG_EXTRA) != 0 {
        let xlen = br.read_u16le()? as usize;
        br.read_skip(xlen)?;
    }
    if (flg & FLAG_NAME) != 0 {
        // NUL-terminated original file name; skipped.
        loop {
            let b = br.read_byte()?;
            if b == 0 {
                break;
            }
        }
    }
    if (flg & FLAG_COMMENT) != 0 {
        // NUL-terminated comment; skipped.
        loop {
            let b = br.read_byte()?;
            if b == 0 {
                break;
            }
        }
    }
    let _hcrc = if (flg & FLAG_HCRC) != 0 {
            br.read_u16le()?
        } else {
            0
        };
    // Reserved flag bits must be zero.
    if (flg & 0xE0) != 0 {
        return Err(DecompressError::Unsupported);
    }

    let mut output: Vec<u8> = Vec::new();
    let mut tail = [0u8; 8];
    let mut inblk = [0u8; 1024];
    let mut oblk = [0u8; 4096];
    let mut inflate = Inflate::new();
    let mut checker = GzipCRC32::new();

    // Stream the compressed payload through Inflate in 1 KiB input chunks,
    // flushing the 4 KiB output block whenever it fills up.
    loop {
        let ret = br.read_buf_some(&mut inblk);
        if let Err(ByteIOError::EOF) = ret {
            break;
        }
        let inlen = match ret {
                Ok(val) => val,
                Err(_)  => return Err(DecompressError::IOError),
            };
        let mut repeat = false;
        loop {
            let ret = inflate.decompress_data(&inblk[..inlen], &mut oblk, repeat);
            match ret {
                Ok(outlen) => {
                    checker.update_crc(&oblk[..outlen]);
                    output.extend_from_slice(&oblk[..outlen]);
                    break;
                },
                Err(DecompressError::ShortData) => {
                    // This input chunk is fully consumed; fetch the next one.
                    break;
                },
                Err(DecompressError::OutputFull) => {
                    // Flush the full output block and keep decoding the same chunk.
                    repeat = true;
                    checker.update_crc(&oblk);
                    output.extend_from_slice(&oblk);
                },
                Err(err) => {
                    return Err(err);
                },
            }
        }
        // Save last 8 bytes for CRC and size.
        if inlen >= 8 {
            tail.copy_from_slice(&inblk[inlen - 8..][..8]);
        } else {
            // Short chunk: slide the previous tail and append the new bytes so
            // `tail` always holds the last 8 bytes seen overall.
            let shift_len = 8 - inlen;
            for i in 0..shift_len {
                tail[i] = tail[i + inlen];
            }
            for i in shift_len..8 {
                tail[i] = inblk[i - shift_len];
            }
        }
    }
    if !skip_crc {
        // NOTE(review): debug leftover — an unfinished deflate stream only
        // prints "???" instead of being reported as an error.
        if !inflate.is_finished() { println!("???"); }
        // Trailer: CRC-32 then ISIZE (uncompressed size mod 2^32), little-endian.
        let crc = read_u32le(&tail[0..4])?;
        let size = read_u32le(&tail[4..8])?;
        if size != (output.len() as u32) {
            return Err(DecompressError::CRCError);
        }
        if crc != checker.crc {
            return Err(DecompressError::CRCError);
        }
    }

    Ok(output)
}
925
// One LZ77 element for the encoder: either a literal (`sym` < 256) or a
// match (`sym` > 256 with its length/distance and precomputed distance symbol).
#[derive(Clone,Copy,Default)]
struct Token {
    sym:     u16, // literal/length symbol (0..=285)
    distsym: u8,  // distance symbol (0..=29), valid for matches only
    len:     u16, // match length (3..=258)
    dist:    u16, // match distance (1..=32768)
}

// End-of-block marker (symbol 256).
const TOKEN_EOB: Token = Token { sym: 256, distsym: 0, len: 0, dist: 0 };

impl Token {
    // Wraps a single literal byte.
    fn from_literal(sym: u8) -> Self {
        Self {
            sym: u16::from(sym),
            distsym: 0,
            dist: 0,
            len: 0,
        }
    }
    // Builds a match token, mapping length and distance to their RFC 1951
    // symbol numbers (tables in section 3.2.5).
    fn from_match(dist: u16, len: u16) -> Self {
        let sym = match len {
              3..= 10 => 257 + len - 3,
             11..= 18 => 265 + (len - 11) / 2,
             19..= 34 => 269 + (len - 19) / 4,
             35..= 66 => 273 + (len - 35) / 8,
             67..=130 => 277 + (len - 67) / 16,
            131..=257 => 281 + (len - 131) / 32,
                    _ => 285, // length 258 has its own dedicated symbol
        };
        let d = dist - 1;
        let distsym = if d < 4 {
                // Distances 1..=4 map directly to symbols 0..=3 (no extra bits).
                d as u8
            } else {
                // Two symbols per power-of-two range; the second-highest bit
                // of (dist - 1) selects the upper half.
                let top = 16 - d.leading_zeros();
                let half = ((d >> (top - 2)) & 1) as u8;
                (top as u8) * 2 - 2 + half
            };
        Self { sym, distsym, len, dist }
    }
}
966
// Run-length encodes a sequence of code lengths into the code-length alphabet
// (RFC 1951, 3.2.7): plain lengths 0..=15, code 16 = repeat previous 3..=6
// times, code 17 = 3..=10 zeros, code 18 = 11..=138 zeros. Each emitted token
// is `(symbol, extra_bits_value)` and `stats` counts symbol frequencies for
// the subsequent Huffman tree construction.
//
// The original had the flush logic duplicated verbatim for the mid-sequence
// and end-of-sequence cases; it is factored into `flush_run` below with
// behavior unchanged.
fn add_codes(lens: &[u8], stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
    // Emits a pending run of `count` repeats of `val` (the occurrence that
    // started the run has already been emitted literally by the caller).
    fn flush_run(val: u8, mut count: u8, stats: &mut [u32], toks: &mut Vec<(u8, u8)>) {
        if val == 0 {
            // Long zero runs use code 18 (11..=138), shorter ones code 17 (3..=10).
            while count > 10 {
                let run = count.min(138);
                stats[18] += 1;
                toks.push((18, run - 11));
                count -= run;
            }
            if count >= 3 {
                stats[17] += 1;
                toks.push((17, count - 3));
                count = 0;
            }
            for _ in 0..count {
                stats[0] += 1;
                toks.push((0, 0));
            }
        } else {
            // Non-zero repeats use code 16 (3..=6 copies of the previous length).
            while count >= 3 {
                let run = count.min(6);
                stats[16] += 1;
                toks.push((16, run - 3));
                count -= run;
            }
            for _ in 0..count {
                stats[val as usize] += 1;
                toks.push((val, 0));
            }
        }
    }

    let mut last = 42; // impossible code length, so the first element never matches
    let mut lcount = 0;

    for &len in lens.iter() {
        if len == last {
            lcount += 1;
        } else {
            // Value changed: flush the accumulated run of `last`, then emit
            // the new value literally and start counting its repeats.
            flush_run(last, lcount, stats, toks);
            stats[len as usize] += 1;
            toks.push((len, 0));
            last = len;
            lcount = 0;
        }
    }
    // Flush whatever run was still being accumulated at the end.
    if lcount > 0 {
        flush_run(last, lcount, stats, toks);
    }
}
1040
/// Deflate stream writer.
pub struct DeflateWriter {
    dst:  Vec<u8>, // accumulated output bytes
    bits: u8,      // number of pending bits in `bbuf`
    bbuf: u32,     // LSB-first bit accumulator
}
1047
impl DeflateWriter {
    /// Creates a new instance of `DeflateWriter` for a provided output.
    pub fn new(dst: Vec<u8>) -> Self {
        Self {
            dst,
            bits: 0,
            bbuf: 0,
        }
    }
    // Pads the pending bits up to the next byte boundary with zero bits.
    fn align(&mut self) {
        if (self.bits & 7) != 0 {
            self.bits += 8 - (self.bits & 7);
        }
    }
    // Moves all complete bytes from the bit accumulator into the output.
    fn flush(&mut self) {
        while self.bits >= 8 {
            self.dst.push(self.bbuf as u8);
            self.bbuf >>= 8;
            self.bits -= 8;
        }
    }
    // Appends `len` bits of `val`, LSB first. Flushing first keeps fewer than
    // 8 pending bits, so val (max 16 bits) always fits in the 32-bit buffer.
    fn write(&mut self, val: u16, len: u8) {
        self.flush();
        self.bbuf |= u32::from(val) << self.bits;
        self.bits += len;
    }
    /// Finishes writing the stream and returns the output vector.
    pub fn end(mut self) -> Vec<u8> {
        self.flush();
        if self.bits > 0 {
            self.dst.push(self.bbuf as u8);
        }
        self.dst
    }

    // Writes the dynamic block code description: HCLEN, the code-length code
    // lengths in LEN_RECODE order, and the run-length-coded literal/distance
    // code lengths (RFC 1951, 3.2.7).
    fn write_codes(&mut self, codes: &CodeHuff, dists: &DistHuff) {
        let mut stats = [0u32; 19];
        let mut toks = Vec::with_capacity(NUM_LITERALS + NUM_DISTS);
        let mut cw = [0u16; 19];
        let mut cl = [0u8; 19];
        let mut nc = 0;

        // Run-length encode both length tables and gather symbol statistics.
        add_codes(&codes.lens[..codes.num_codes], &mut stats, &mut toks);
        add_codes(&dists.lens[..dists.num_codes], &mut stats, &mut toks);

        // Build the code-length alphabet Huffman code (max length 7 bits).
        gen_tree(&mut cw, &mut cl, &mut nc, &mut stats, 7);

        // Trim trailing zero-length entries in transmission order, but always
        // send at least four of them (HCLEN is stored as nc - 4).
        nc = cw.len();
        for &idx in LEN_RECODE.iter().rev() {
            if cl[idx] == 0 {
                nc -= 1;
            } else {
                break;
            }
        }
        if nc < 4 {
            nc = 4;
        }
        self.write((nc - 4) as u16, 4);
        for &idx in LEN_RECODE.iter().take(nc) {
            self.write(u16::from(cl[idx]), 3);
        }
        for &(sym, add) in toks.iter() {
            self.write(cw[sym as usize], cl[sym as usize]);
            // Codes 16/17/18 carry 2/3/7 extra repeat-count bits.
            match sym {
                16 => self.write(u16::from(add), 2),
                17 => self.write(u16::from(add), 3),
                18 => self.write(u16::from(add), 7),
                _  => {},
            };
        }
    }
    // Writes a block of LZ77 tokens with the given literal/length and
    // distance codes; matches also carry their extra bits.
    fn write_tokens(&mut self, src: &[Token], codes: &CodeHuff, dists: &DistHuff) {
        for &tok in src.iter() {
            self.write(codes.codes[tok.sym as usize], codes.lens[tok.sym as usize]);
            if tok.sym > 256 {
                self.write_len_bits(tok.len);
                self.write(dists.codes[tok.distsym as usize], dists.lens[tok.distsym as usize]);
                self.write_dist_bits(tok.dist);
            }
        }
    }
    // Writes the extra bits of a length symbol; lengths 3..=10 and 258
    // (llen == 255) have none.
    fn write_len_bits(&mut self, len: u16) {
        let llen = len - 3;
        if llen >= 8 && llen < 255 {
            let bits = (16 - llen.leading_zeros() - 3) as u8;
            self.write(llen & ((1 << bits) - 1), bits);
        }
    }
    // Writes the extra bits of a distance symbol; short distances carry none
    // (dist == 4 passes the check but writes zero bits).
    fn write_dist_bits(&mut self, dist: u16) {
        let ddist = dist - 1;
        if dist >= 4 {
            let bits = (16 - ddist.leading_zeros() - 2) as u8;
            self.write(ddist & ((1 << bits) - 1), bits);
        }
    }
}
1145
// Literal/length Huffman code set for one deflate block.
struct CodeHuff {
    is_fixed: bool,              // use the predefined RFC 1951 table instead of statistics
    stats: [u32; NUM_LITERALS],  // symbol frequencies gathered from the tokens
    codes: [u16; NUM_LITERALS],  // canonical codes, bit-reversed for LSB-first writing
    lens: [u8; NUM_LITERALS],    // code length per symbol (0 = unused)
    num_codes: usize,            // number of codes actually transmitted
}
1153
1154 impl CodeHuff {
1155 fn new(is_fixed: bool) -> Self {
1156 Self {
1157 is_fixed,
1158 stats: [0; NUM_LITERALS],
1159 codes: [0; NUM_LITERALS],
1160 lens: [0; NUM_LITERALS],
1161 num_codes: NUM_LITERALS,
1162 }
1163 }
1164 fn make_codes(&mut self, src: &[Token]) {
1165 if self.is_fixed {
1166 for i in 0..=143 {
1167 self.codes[i] = reverse_bits((i + 0x30) as u32, 8) as u16;
1168 self.lens[i] = 8;
1169 }
1170 for i in 144..=255 {
1171 self.codes[i] = reverse_bits((i + 0x100) as u32, 9) as u16;
1172 self.lens[i] = 9;
1173 }
1174 for i in 256..=279 {
1175 self.codes[i] = reverse_bits((i & 0x1F) as u32, 7) as u16;
1176 self.lens[i] = 7;
1177 }
1178 for i in 280..NUM_LITERALS {
1179 self.codes[i] = reverse_bits((i - 280 + 0xC0) as u32, 8) as u16;
1180 self.lens[i] = 8;
1181 }
1182 } else {
1183 for &tok in src.iter() {
1184 self.stats[tok.sym as usize] += 1;
1185 }
1186 gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
1187 if self.num_codes < 257 {
1188 self.num_codes = 257;
1189 }
1190 }
1191 }
1192 }
1193
// Distance Huffman code set for one deflate block.
struct DistHuff {
    is_fixed: bool,           // use the fixed five-bit table instead of statistics
    stats: [u32; NUM_DISTS],  // distance-symbol frequencies gathered from the tokens
    codes: [u16; NUM_DISTS],  // canonical codes, bit-reversed for LSB-first writing
    lens: [u8; NUM_DISTS],    // code length per symbol (0 = unused)
    num_codes: usize,         // number of codes actually transmitted
}
1201
1202 impl DistHuff {
1203 fn new(is_fixed: bool) -> Self {
1204 Self {
1205 is_fixed,
1206 stats: [0; NUM_DISTS],
1207 codes: [0; NUM_DISTS],
1208 lens: [0; NUM_DISTS],
1209 num_codes: NUM_DISTS,
1210 }
1211 }
1212 fn make_codes(&mut self, src: &[Token]) {
1213 if self.is_fixed {
1214 for i in 0..NUM_DISTS {
1215 self.codes[i] = reverse_bits(i as u32, 5) as u16;
1216 self.lens[i] = 5;
1217 }
1218 } else {
1219 for &tok in src.iter() {
1220 if tok.sym > 256 {
1221 self.stats[tok.distsym as usize] += 1;
1222 }
1223 }
1224 gen_tree(&mut self.codes, &mut self.lens, &mut self.num_codes, &mut self.stats, 15);
1225 if self.num_codes < 1 {
1226 self.num_codes = 1;
1227 }
1228 }
1229 }
1230 }
1231
// One entry of the Huffman tree builder.
#[derive(Clone,Copy,Default)]
struct Node {
    sym: u16,  // symbol for leaves, NODE_SYM for internal nodes
    w: u16,    // weight while building; reused to hold the code length afterwards
    idx0: u16, // index of the first child (internal nodes only)
    idx1: u16, // index of the second child (internal nodes only)
}

// Marker stored in `Node::sym` for internal (non-leaf) nodes.
const NODE_SYM: u16 = 65500;

// Flat pool of nodes kept sorted by weight while the Huffman tree is built.
struct Tree {
    nodes: [Node; NUM_LITERALS * 2],
    nnodes: usize, // number of pool entries currently in use
}
1246
impl Tree {
    fn new() -> Self {
        Self {
            nodes: [Node::default(); NUM_LITERALS * 2],
            nnodes: 0,
        }
    }
    // Inserts `val` keeping the pool sorted by weight (ascending).
    // Only not-yet-consumed nodes are ever shifted, so child indices stored
    // in `idx0`/`idx1` (which point at consumed, zero-weight nodes) stay valid.
    fn insert(&mut self, val: Node) {
        let mut idx = self.nnodes;
        for (i, nn) in self.nodes[..self.nnodes].iter().enumerate() {
            if nn.w > val.w {
                idx = i;
                break;
            }
        }
        if idx < self.nnodes {
            for i in (idx..self.nnodes).rev() {
                self.nodes[i + 1] = self.nodes[i];
            }
        }
        self.nodes[idx] = val;
        self.nnodes += 1;
    }
    // Drops leading zero-weight entries (symbols that never occurred).
    fn trim(&mut self) {
        let mut start = self.nnodes;
        for (i, n) in self.nodes[..self.nnodes].iter().enumerate() {
            if n.w != 0 {
                start = i;
                break;
            }
        }
        if start != 0 {
            for i in 0..self.nnodes - start {
                self.nodes[i] = self.nodes[i + start];
            }
            self.nnodes -= start;
        }
    }
    // Builds the Huffman tree in place: repeatedly combines the two lightest
    // live nodes (always at `start` and `start + 1` thanks to the sort order),
    // zeroes their weights so they are skipped from then on, and inserts the
    // combined node back. Finally converts weights into code lengths.
    fn build(&mut self) {
        if self.nnodes == 1 {
            // a single symbol still needs a one-bit code
            self.nodes[0].w = 1;
            return;
        }
        let mut start = 0;
        while start + 1 < self.nnodes {
            let nn1 = self.nodes[start];
            let nn2 = self.nodes[start + 1];
            let n = Node {
                sym: NODE_SYM,
                w: nn1.w + nn2.w,
                idx0: start as u16,
                idx1: (start + 1) as u16,
            };
            self.nodes[start].w = 0;
            self.nodes[start + 1].w = 0;
            start += 2;
            self.insert(n);
        }
        if self.nnodes > 1 {
            // the last inserted node is the root
            self.assign_len(self.nnodes - 1, 0);
        }
    }
    // Walks the tree and stores each leaf's depth (= code length) in its `w`.
    fn assign_len(&mut self, idx: usize, len: u16) {
        if self.nodes[idx].sym == NODE_SYM {
            self.assign_len(self.nodes[idx].idx0 as usize, len + 1);
            self.assign_len(self.nodes[idx].idx1 as usize, len + 1);
        } else {
            self.nodes[idx].w = len;
        }
    }
}
1318
1319 fn gen_tree(codes: &mut [u16], lens: &mut [u8], num_codes: &mut usize, stats: &mut [u32], max_bits: u8) {
1320 let mut tot_w = 0;
1321 for &w in stats.iter() {
1322 tot_w += w;
1323 }
1324 if tot_w == 0 {
1325 codes[0] = 0;
1326 lens[0] = 0;
1327 *num_codes = 0;
1328 return;
1329 }
1330 while tot_w > (1 << max_bits) {
1331 for w in stats.iter_mut() {
1332 *w = (*w + 1) >> 1;
1333 }
1334 tot_w = 0;
1335 for &w in stats.iter() {
1336 tot_w += w;
1337 }
1338 }
1339 let mut tree = Tree::new();
1340 for (sym, &w) in stats.iter().enumerate() {
1341 tree.insert(Node{ sym: sym as u16, w: w as u16, idx0: 64000, idx1: 64000 });
1342 }
1343 tree.trim();
1344 tree.build();
1345
1346 for n in tree.nodes[..tree.nnodes].iter() {
1347 if n.sym != NODE_SYM {
1348 lens[n.sym as usize] = n.w as u8;
1349 }
1350 }
1351 lengths_to_codes16(lens, codes);
1352 let mut sz = codes.len();
1353 for &len in lens.iter().rev() {
1354 if len != 0 {
1355 break;
1356 }
1357 sz -= 1;
1358 }
1359 *num_codes = sz;
1360 }
1361
1362 fn lengths_to_codes16(lens: &[u8], codes: &mut [u16]) {
1363 let mut bits = [0u32; 32];
1364 let mut pfx = [0u32; 33];
1365 for len in lens.iter() {
1366 let len = *len as usize;
1367 bits[len] += 1;
1368 }
1369 bits[0] = 0;
1370 let mut code = 0;
1371 for i in 0..bits.len() {
1372 code = (code + bits[i]) << 1;
1373 pfx[i + 1] = code;
1374 }
1375
1376 for (len, codes) in lens.iter().zip(codes.iter_mut()) {
1377 let len = *len as usize;
1378 if len != 0 {
1379 let bits = len as u8;
1380 *codes = reverse_bits(pfx[len], bits) as u16;
1381 pfx[len] += 1;
1382 } else {
1383 *codes = 0;
1384 }
1385 }
1386 }
1387
// Common interface of the LZ77 parsers: turn input bytes into a stream of
// literal/match tokens terminated by an end-of-block token.
trait LZParse {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>);
}
1391
1392 struct NoParser {}
1393 impl LZParse for NoParser {
1394 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1395 dst.reserve(src.len());
1396 for &b in src.iter() {
1397 dst.push(Token::from_literal(b));
1398 }
1399 dst.push(TOKEN_EOB);
1400 }
1401 }
1402
/// Returns the length of the common prefix of the two slices
/// (bounded by the shorter of the two).
fn check_match(src1: &[u8], src2: &[u8]) -> u16 {
    src1.iter()
        .zip(src2.iter())
        .take_while(|(a, b)| a == b)
        .count() as u16
}
1413
const HASH_SIZE: usize = 4096;                    // number of hash chains (power of two, used as a mask)
const MAX_MATCH_LEN: usize = 258;                 // longest match length deflate can encode
const WINDOW_SIZE: usize = 32768 - MAX_MATCH_LEN; // how far back matches are searched
const NONEXT: usize = WINDOW_SIZE * 2;            // sentinel: no further entry in a hash chain
1418
// Hash-chain based searcher for LZ77 matches.
struct MatchFinder<'a> {
    src: &'a [u8],                   // input being searched
    pos: usize,                      // current position in `src`
    hstart: [usize; HASH_SIZE],      // chain head per hash (largest position first)
    hend: [usize; HASH_SIZE],        // chain tail per hash (smallest position)
    hnext: [usize; WINDOW_SIZE * 3], // next (smaller) position in chain, indexed by position
}
1426
1427 impl<'a> MatchFinder<'a> {
1428 fn new(src: &'a [u8]) -> Self {
1429 let mut obj = Self {
1430 src,
1431 pos: 0,
1432 hstart: [0; HASH_SIZE],
1433 hend: [0; HASH_SIZE],
1434 hnext: [0; WINDOW_SIZE * 3],
1435 };
1436 obj.build_hash();
1437 obj
1438 }
1439 fn hash(src: &[u8]) -> usize {
1440 let _ = src[2];
1441 (((u16::from(src[0]) << 10) ^ (u16::from(src[1]) << 5) ^ u16::from(src[2])) & ((HASH_SIZE as u16) - 1)) as usize
1442 }
1443 fn build_hash(&mut self) {
1444 for el in self.hstart.iter_mut() { *el = NONEXT; }
1445 for el in self.hend.iter_mut() { *el = NONEXT; }
1446 for el in self.hnext.iter_mut() { *el = NONEXT; }
1447 if self.pos + 3 >= self.src.len() {
1448 return;
1449 }
1450 let end = (self.src.len() - 3).min(self.pos + NONEXT);
1451 for i in (self.pos .. end).rev() {
1452 let key = Self::hash(&self.src[i..]);
1453 if self.hstart[key] == NONEXT {
1454 self.hstart[key] = i;
1455 self.hend[key] = i;
1456 self.hnext[key] = NONEXT;
1457 } else {
1458 self.hnext[self.hend[key]] = i;
1459 self.hend[key] = i;
1460 }
1461 }
1462 }
1463 fn find_match(&mut self) -> (u16, u16) {
1464 if self.pos == 0 || self.pos + 3 > self.src.len() {
1465 return (0, 0);
1466 }
1467 let key = Self::hash(&self.src[self.pos..]) as usize;
1468
1469 let mut best_pos = 0;
1470 let mut best_len = 0;
1471 let mut idx = self.hstart[key];
1472 while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
1473 if idx < self.pos {
1474 let cur_len = check_match(&self.src[self.pos..], &self.src[idx..]);
1475 if cur_len > best_len {
1476 best_len = cur_len;
1477 best_pos = self.pos - idx;
1478 if best_len >= (MAX_MATCH_LEN as u16) {
1479 return (best_pos as u16, MAX_MATCH_LEN as u16);
1480 }
1481 }
1482 }
1483 idx = self.hnext[idx];
1484 }
1485 (best_pos as u16, best_len)
1486 }
1487 fn find_all_matches(&mut self, dists: &mut [u16; MAX_MATCH_LEN + 1]) {
1488 if self.pos == 0 || self.pos + 3 > self.src.len() {
1489 return;
1490 }
1491 let key = Self::hash(&self.src[self.pos..]) as usize;
1492 let mut idx = self.hstart[key];
1493 while idx != NONEXT && idx + WINDOW_SIZE > self.pos {
1494 if idx < self.pos {
1495 let cur_len = (check_match(&self.src[self.pos..], &self.src[idx..]) as usize).min(MAX_MATCH_LEN);
1496 if cur_len > 0 && dists[cur_len] == 0 {
1497 dists[cur_len] = (self.pos - idx) as u16;
1498 }
1499 }
1500 idx = self.hnext[idx];
1501 }
1502 }
1503 fn advance(&mut self, num: usize) {
1504 self.pos += num;
1505
1506 if self.pos >= NONEXT {
1507 let (_, tail) = self.src.split_at(self.pos - WINDOW_SIZE);
1508 self.src = tail;
1509 self.pos = WINDOW_SIZE;
1510 self.build_hash();
1511 } else {
1512 for (start, end) in self.hstart.iter_mut().zip(self.hend.iter_mut()) {
1513 let mut idx = *start;
1514 while idx != NONEXT && idx + WINDOW_SIZE < self.pos {
1515 idx = self.hnext[idx];
1516 *start = idx;
1517 }
1518 if idx == NONEXT {
1519 *end = NONEXT;
1520 }
1521 }
1522 }
1523 }
1524 fn get_sym(&self) -> u8 { self.src[self.pos] }
1525 fn is_end(&self) -> bool { self.pos >= self.src.len() }
1526 }
1527
1528 struct GreedyParser {}
1529 impl LZParse for GreedyParser {
1530 fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
1531 dst.reserve(src.len());
1532
1533 let mut matcher = MatchFinder::new(src);
1534 while !matcher.is_end() {
1535 let (best_pos, best_len) = matcher.find_match();
1536
1537 if best_len >= 3 {
1538 dst.push(Token::from_match(best_pos, best_len));
1539 matcher.advance(best_len as usize);
1540 } else {
1541 dst.push(Token::from_literal(matcher.get_sym()));
1542 matcher.advance(1);
1543 }
1544 }
1545 dst.push(TOKEN_EOB);
1546 }
1547 }
1548
// Parser with one-byte lookahead: a match is deferred when the next
// position offers a noticeably longer one.
struct LazyParser {}
impl LZParse for LazyParser {
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        dst.reserve(src.len());

        let mut matcher = MatchFinder::new(src);
        while !matcher.is_end() {
            let (best_pos, best_len) = matcher.find_match();
            if best_len >= 3 {
                let last_sym = matcher.get_sym();
                matcher.advance(1); // step forward to evaluate the lookahead match
                if !matcher.is_end() {
                    let (best_pos1, best_len1) = matcher.find_match();
                    if best_len1 > best_len + 1 {
                        // the deferred match wins: literal first, then the longer match
                        dst.push(Token::from_literal(last_sym));
                        dst.push(Token::from_match(best_pos1, best_len1));
                        matcher.advance(best_len1 as usize);
                        continue;
                    }
                }
                dst.push(Token::from_match(best_pos, best_len));
                // one byte of the match was already consumed by the lookahead step
                matcher.advance((best_len - 1) as usize);
            } else {
                dst.push(Token::from_literal(matcher.get_sym()));
                matcher.advance(1);
            }
        }
        dst.push(TOKEN_EOB);
    }
}
1579
// One trellis node of the optimal parser.
#[derive(Clone,Copy)]
struct TNode {
    price: u32,  // cheapest estimated bit cost to reach this position
    dist: u16,   // distance of the best match starting here (0 = none recorded)
    link: usize, // neighbour index: back-pointer during search, forward-pointer after reversal
}

impl Default for TNode {
    fn default() -> Self {
        Self {
            price: std::u32::MAX, // "not reached yet"
            dist: 0,
            link: 0,
        }
    }
}
1596
// Parser that picks the token sequence with the lowest estimated bit cost.
struct OptimalParser {
    trellis: Vec<TNode>, // one node per input position plus a terminal node
}
impl OptimalParser {
    fn new() -> Self { Self::default() }
    // Rough cost in bits of coding one literal.
    fn sym_price(_sym: u8) -> u32 { 9 }
    // Rough cost in bits of coding a match with the given distance
    // (larger distances need more extra bits).
    fn match_price(dist: u16, _len: u16) -> u32 {
        if dist <= 4 {
            9 + 5
        } else {
            let bits = 16 - (dist - 1).leading_zeros();
            9 + 3 + bits
        }
    }
}
impl Default for OptimalParser {
    fn default() -> Self {
        Self {
            trellis: Vec::with_capacity(WINDOW_SIZE),
        }
    }
}
impl LZParse for OptimalParser {
    // Shortest-path search over a trellis: node `i` is "the first `i` bytes
    // are encoded", edges are single literals or any available match length.
    fn parse(&mut self, src: &[u8], dst: &mut Vec<Token>) {
        if src.is_empty() {
            dst.push(TOKEN_EOB);
            return;
        }
        dst.reserve(src.len());

        self.trellis.truncate(0);
        self.trellis.reserve(src.len() + 1);
        for _ in 0..=src.len() {
            self.trellis.push(TNode::default());
        }
        self.trellis[0].price = 0; // the start node is free

        let mut matcher = MatchFinder::new(src);
        for i in 0..self.trellis.len() - 1 {
            let mut dists = [0; MAX_MATCH_LEN + 1];
            matcher.find_all_matches(&mut dists);

            // relax the literal edge i -> i+1
            let sym = matcher.get_sym();
            let lprice = Self::sym_price(sym) + self.trellis[i].price;
            if self.trellis[i + 1].price > lprice {
                self.trellis[i + 1].price = lprice;
                self.trellis[i + 1].link = i;
            }
            // relax the match edges i -> i+len; `link` records the predecessor.
            // NOTE(review): `dist` at node i is overwritten by each improving
            // length (scanned in increasing order), so it ends up belonging to
            // the longest improving match from i; any shorter length picked on
            // the final path is still covered as that match's prefix.
            for (len, &dist) in dists.iter().enumerate() {
                if dist != 0 {
                    let mprice = Self::match_price(dist, len as u16) + self.trellis[i].price;
                    if self.trellis[i + len].price > mprice {
                        self.trellis[i + len].price = mprice;
                        self.trellis[i + len].link = i;
                        self.trellis[i].dist = dist;
                    }
                }
            }

            matcher.advance(1);
        }
        // turn the chain of back-pointers from the terminal node into
        // forward pointers starting at node 0
        let mut idx = self.trellis.len() - 1;
        let mut nidx = self.trellis[idx].link;
        while idx > 0 {
            let oidx = idx;
            idx = nidx;
            nidx = self.trellis[idx].link;
            self.trellis[idx].link = oidx;
        }

        // walk the optimal path and emit tokens: a step of 1 is a literal,
        // anything longer is a match using the distance recorded at the node
        let mut idx = 0;
        while idx < self.trellis.len() - 1 {
            let len = self.trellis[idx].link - idx;
            if len == 1 {
                dst.push(Token::from_literal(src[idx]));
            } else {
                dst.push(Token::from_match(self.trellis[idx].dist, len as u16));
            }
            idx = self.trellis[idx].link;
        }

        dst.push(TOKEN_EOB);
    }
}
1681
/// Deflate compression mode.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum DeflateMode {
    /// No compression.
    NoCompr,
    /// Fast compression.
    Fast,
    /// Still fast but better compression.
    Better,
    /// Slow but the best compression.
    Best,
}
1694
// Internal block coding mode derived from `DeflateMode`.
#[derive(Clone,Copy,Debug,PartialEq)]
enum Mode {
    Copy,    // stored (uncompressed) blocks
    Fixed,   // fixed Huffman codes
    Dynamic, // dynamic Huffman codes
}
1701
// Input is buffered and encoded in blocks of up to this many bytes
// (the stored-block LEN field is 16 bits wide).
const MAX_BLOCK_SIZE: usize = 65535;
1703
/// Deflate stream compressor.
pub struct Deflate {
    mode: Mode,                   // block coding mode
    tokens: Vec<Token>,           // token buffer reused between blocks
    srcbuf: [u8; MAX_BLOCK_SIZE], // pending input for the current block
    ssize: usize,                 // number of valid bytes in `srcbuf`
    sum1: u32,                    // Adler-32 low sum (starts at 1)
    sum2: u32,                    // Adler-32 high sum
    zlib_mode: bool,              // whether a zlib footer must be written at the end
    parser: Box<dyn LZParse>,     // LZ77 parser matching the chosen mode
}
1715
impl Deflate {
    /// Creates a new instance of `Deflate`.
    pub fn new(mode: DeflateMode) -> Self {
        let (mode, parser) = match mode {
            DeflateMode::NoCompr => (Mode::Copy, Box::new(NoParser{}) as Box<dyn LZParse>),
            DeflateMode::Fast => (Mode::Fixed, Box::new(GreedyParser{}) as Box<dyn LZParse>),
            DeflateMode::Better => (Mode::Dynamic, Box::new(LazyParser{}) as Box<dyn LZParse>),
            DeflateMode::Best => (Mode::Dynamic, Box::new(OptimalParser::new()) as Box<dyn LZParse>),
        };
        Self {
            mode, parser,
            tokens: Vec::with_capacity(MAX_BLOCK_SIZE),
            srcbuf: [0; MAX_BLOCK_SIZE],
            ssize: 0,
            sum1: 1, // Adler-32 starts with sum1 = 1, sum2 = 0
            sum2: 0,
            zlib_mode: false,
        }
    }
    /// Writes zlib stream header.
    pub fn write_zlib_header(&mut self, wr: &mut DeflateWriter) {
        wr.write(8, 4); // CM = 8 (deflate)
        wr.write(7, 4); // CINFO = 7 (32K window) -> CMF byte 0x78
        // FLG byte with check bits and compression level hint
        let level = match self.mode {
            Mode::Copy => 0x01,
            Mode::Fixed => 0x5E,
            Mode::Dynamic => 0x9C,
            // 0xDA for the strongest one
        };
        wr.write(level, 8);
        self.zlib_mode = true;
    }
    // Writes the big-endian Adler-32 checksum that ends a zlib stream.
    fn write_zlib_footer(&self, wr: &mut DeflateWriter) {
        wr.align();
        wr.write((self.sum2 >> 8) as u16, 8);
        wr.write((self.sum2 & 0xFF) as u16, 8);
        wr.write((self.sum1 >> 8) as u16, 8);
        wr.write((self.sum1 & 0xFF) as u16, 8);
    }
    /// Queues data for compression.
    ///
    /// The data might be not actually compressed until [`compress_end`] is called.
    ///
    /// [`compress_end`]: ./struct.Deflate.html#method.compress_end
    pub fn compress(&mut self, src: &[u8], wr: &mut DeflateWriter) {
        let mut src = src;
        while !src.is_empty() {
            // fill the block buffer; emit a non-final block whenever it is full
            let clen = src.len().min(MAX_BLOCK_SIZE - self.ssize);
            let (head, tail) = src.split_at(clen);
            src = tail;
            self.srcbuf[self.ssize..][..clen].copy_from_slice(head);
            self.ssize += clen;
            if self.ssize == MAX_BLOCK_SIZE {
                self.do_block(wr, false);
            }
        }
    }
    /// Tells the encoder to finish data compression.
    ///
    /// Complete data will be output after this call.
    pub fn compress_end(&mut self, wr: &mut DeflateWriter) {
        if self.ssize > 0 {
            self.do_block(wr, true);
        } else {
            // no pending data: emit an empty final fixed-code block
            wr.write(1, 1);
            wr.write(1, 2);
            wr.write(0, 7); //static EOF sym
        }
        if self.zlib_mode {
            self.write_zlib_footer(wr);
        }
    }
    // Encodes the buffered input as one deflate block and updates the checksum.
    fn do_block(&mut self, wr: &mut DeflateWriter, final_block: bool) {
        // NOTE: despite the name this is the Adler-32 modulus, not a CRC parameter.
        const CRC_BASE: u32 = 65521;
        // fold the new input into the running Adler-32 sums; a single
        // conditional subtraction suffices since both sums stay below the base
        for &b in self.srcbuf[..self.ssize].iter() {
            self.sum1 += u32::from(b);
            if self.sum1 >= CRC_BASE {
                self.sum1 -= CRC_BASE;
            }
            self.sum2 += self.sum1;
            if self.sum2 >= CRC_BASE {
                self.sum2 -= CRC_BASE;
            }
        }
        match self.mode {
            Mode::Copy => {
                // stored block: BFINAL, BTYPE=00, byte alignment,
                // then LEN, NLEN (one's complement) and the raw bytes
                wr.write(final_block as u16, 1);
                wr.write(0, 2);
                wr.align();
                wr.write(self.ssize as u16, 16);
                wr.write(!self.ssize as u16, 16);
                for &b in self.srcbuf[..self.ssize].iter() {
                    wr.write(u16::from(b), 8);
                }
            },
            Mode::Fixed => {
                // BTYPE=01: fixed Huffman codes, no code tables transmitted
                wr.write(final_block as u16, 1);
                wr.write(1, 2);
                self.tokens.truncate(0);
                self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
                let mut codes = CodeHuff::new(true);
                codes.make_codes(&self.tokens);
                let mut dists = DistHuff::new(true);
                dists.make_codes(&self.tokens);
                wr.write_tokens(&self.tokens, &codes, &dists);
            },
            Mode::Dynamic => {
                // BTYPE=10: HLIT and HDIST counters, code-length codes, tokens
                wr.write(final_block as u16, 1);
                wr.write(2, 2);
                self.tokens.truncate(0);
                self.parser.parse(&self.srcbuf[..self.ssize], &mut self.tokens);
                let mut codes = CodeHuff::new(false);
                codes.make_codes(&self.tokens);
                let mut dists = DistHuff::new(false);
                dists.make_codes(&self.tokens);
                wr.write((codes.num_codes - 257) as u16, 5);
                wr.write((dists.num_codes - 1) as u16, 5);
                wr.write_codes(&codes, &dists);
                wr.write_tokens(&self.tokens, &codes, &dists);
            },
        }
        self.ssize = 0;
    }
}
1840
1841 #[cfg(test)]
1842 mod test {
1843 use super::*;
1844
    // Decompresses a small complete zlib stream in one call.
    #[test]
    fn test_inflate1() {
        const TEST_DATA: &[u8] = &[
            0xF3, 0x48, 0xCD, 0xC9, 0xC9, 0xD7, 0x51, 0x28,
            0xCF, 0x2F, 0xCA, 0x49, 0x51, 0x04, 0x00 ];
        const TEST_REF: &[u8] = b"Hello, world!";
        let mut dst_buf = [0u8; 13];
        let len = Inflate::uncompress(TEST_DATA, &mut dst_buf).unwrap();
        assert_eq!(len, 13);
        for i in 0..len {
            assert_eq!(dst_buf[i], TEST_REF[i]);
        }
    }
    // Decompresses a stream fed in 3-byte chunks into a 7-byte output window,
    // exercising the ShortData/OutputFull resumption paths.
    #[test]
    fn test_inflate2() {
        const TEST_DATA3: &[u8] = &[ 0x4B, 0x4C, 0x44, 0x80, 0x24, 0x54, 0x80, 0x2C, 0x06, 0x00 ];
        const TEST_REF3: &[u8] = b"aaaaaaaaaaaabbbbbbbbbbbbbbbaaaaabbbbbbb";
        let mut dst_buf = [0u8; 39];

        let mut inflate = Inflate::new();
        let mut output_chunk = [0u8; 7];
        let mut output_pos = 0;
        for input in TEST_DATA3.chunks(3) {
            let mut repeat = false;
            loop {
                let ret = inflate.decompress_data(input, &mut output_chunk, repeat);
                match ret {
                    Ok(len) => { // this chunk was decoded to the end
                        for i in 0..len {
                            dst_buf[output_pos + i] = output_chunk[i];
                        }
                        output_pos += len;
                        break;
                    },
                    Err(DecompressError::ShortData) => { // input chunk exhausted
                        break;
                    },
                    Err(DecompressError::OutputFull) => {
                        // flush the full output window and resume the same input
                        repeat = true;
                        for i in 0..output_chunk.len() {
                            dst_buf[output_pos + i] = output_chunk[i];
                        }
                        output_pos += output_chunk.len();
                    },
                    _ => {
                        panic!("decompress error {:?}", ret.err().unwrap());
                    },
                }
            }
        }

        assert_eq!(output_pos, dst_buf.len());
        for i in 0..output_pos {
            assert_eq!(dst_buf[i], TEST_REF3[i]);
        }
    }
    // Decodes a complete gzip file (header + deflate payload + trailer).
    #[test]
    fn test_inflate3() {
        const TEST_DATA: &[u8] = &[
            0x1F, 0x8B, 0x08, 0x08, 0xF6, 0x7B, 0x90, 0x5E, 0x02, 0x03, 0x31, 0x2E, 0x74, 0x78, 0x74, 0x00,
            0xE5, 0x95, 0x4B, 0x4E, 0xC3, 0x30, 0x10, 0x40, 0xF7, 0x39, 0xC5, 0x1C, 0x00, 0x16, 0x70, 0x83,
            0x0A, 0xB5, 0x3B, 0xE8, 0x82, 0x5E, 0x60, 0x1A, 0x4F, 0xE2, 0x11, 0xFE, 0x44, 0x1E, 0xA7, 0x69,
            0x6E, 0xCF, 0x38, 0xDD, 0xB0, 0x40, 0xA2, 0x46, 0x2D, 0x20, 0x2A, 0xE5, 0xAB, 0xCC, 0xE7, 0xBD,
            0x49, 0xAC, 0x6C, 0x03, 0x64, 0x4B, 0xD0, 0x71, 0x92, 0x0C, 0x06, 0x67, 0x88, 0x1D, 0x3C, 0xD9,
            0xC4, 0x92, 0x3D, 0x4A, 0xF3, 0x3C, 0x43, 0x4E, 0x23, 0x81, 0x8B, 0x07, 0x82, 0x1E, 0xF5, 0x90,
            0x23, 0x78, 0x6A, 0x56, 0x30, 0x60, 0xCA, 0x89, 0x4D, 0x4F, 0xC0, 0x01, 0x10, 0x06, 0xC2, 0xA4,
            0xA1, 0x44, 0xCD, 0xF6, 0x54, 0x50, 0xA8, 0x8D, 0xC1, 0x9C, 0x5F, 0x71, 0x37, 0x45, 0xC8, 0x63,
            0xCA, 0x8E, 0xC0, 0xE8, 0x23, 0x69, 0x56, 0x9A, 0x8D, 0x5F, 0xB6, 0xC9, 0x96, 0x53, 0x4D, 0x17,
            0xAB, 0xB9, 0xB0, 0x49, 0x14, 0x5A, 0x0B, 0x96, 0x82, 0x7C, 0xB7, 0x6F, 0x17, 0x35, 0xC7, 0x9E,
            0xDF, 0x78, 0xA3, 0xF1, 0xD0, 0xA2, 0x73, 0x1C, 0x7A, 0xD8, 0x2B, 0xB3, 0x5C, 0x90, 0x85, 0xBB,
            0x2A, 0x14, 0x2E, 0xF7, 0xD1, 0x19, 0x48, 0x0A, 0x23, 0x57, 0x45, 0x13, 0x3E, 0xD6, 0xA0, 0xBD,
            0xF2, 0x11, 0x7A, 0x22, 0x21, 0xAD, 0xE5, 0x70, 0x56, 0xA0, 0x9F, 0xA5, 0xA5, 0x03, 0x85, 0x2A,
            0xDE, 0x92, 0x00, 0x32, 0x61, 0x10, 0xAD, 0x27, 0x13, 0x7B, 0x5F, 0x98, 0x7F, 0x59, 0x83, 0xB8,
            0xB7, 0x35, 0x16, 0xEB, 0x12, 0x0F, 0x1E, 0xD9, 0x14, 0x0B, 0xCF, 0xEE, 0x6D, 0x91, 0xF8, 0x93,
            0x6E, 0x81, 0x3F, 0x7F, 0x41, 0xA4, 0x22, 0x1F, 0xB7, 0xE6, 0x85, 0x83, 0x9A, 0xA2, 0x61, 0x12,
            0x0D, 0x0F, 0x6D, 0x01, 0xBD, 0xB0, 0xE8, 0x1D, 0xEC, 0xD1, 0xA0, 0xBF, 0x1F, 0x4E, 0xFB, 0x55,
            0xBD, 0x73, 0xDD, 0x87, 0xB9, 0x53, 0x23, 0x17, 0xD3, 0xE2, 0xE9, 0x08, 0x87, 0x42, 0xFF, 0xCF,
            0x26, 0x42, 0xAE, 0x76, 0xB5, 0xAE, 0x97, 0x0C, 0x18, 0x78, 0xA0, 0x24, 0xE5, 0x54, 0x0C, 0x6E,
            0x60, 0x52, 0x79, 0x22, 0x57, 0xF5, 0x87, 0x78, 0x78, 0x04, 0x93, 0x46, 0xEF, 0xCB, 0x98, 0x96,
            0x8B, 0x65, 0x00, 0xB7, 0x36, 0xBD, 0x77, 0xA8, 0xBD, 0x5A, 0xAA, 0x1A, 0x09, 0x00, 0x00
        ];

        let mut mr = MemoryReader::new_read(TEST_DATA);
        let mut br = ByteReader::new(&mut mr);
        let _dst_buf = gzip_decode(&mut br, false).unwrap();

        // println!("{}", String::from_utf8_lossy(_dst_buf.as_slice()));
    }
    // Checks a stored-mode zlib stream byte-for-byte, including the
    // header and the trailing Adler-32 checksum.
    #[test]
    fn test_deflate_crc() {
        let output = Vec::with_capacity(20);
        let mut writer = DeflateWriter::new(output);
        let mut compr = Deflate::new(DeflateMode::NoCompr);
        compr.write_zlib_header(&mut writer);
        compr.compress(b"Hello, world!", &mut writer);
        compr.compress_end(&mut writer);
        let output = writer.end();
        assert_eq!(output.as_slice(), b"\x78\x01\x01\x0D\x00\xF2\xFFHello, world!\x20\x5E\x04\x8A");
    }
    // Round-trip helper: compresses a fixed text with the given mode
    // and checks that inflating it restores the original exactly.
    fn deflate_test(mode: DeflateMode) {
        const SRC: &[u8] =
b"The first day of Christmas,
My true love sent to me
A partridge in a pear tree.

The second day of Christmas,
My true love sent to me
Two turtle doves, and
A partridge in a pear tree.

The third day of Christmas,
My true love sent to me
Three French hens,
Two turtle doves, and
A partridge in a pear tree.

The fourth day of Christmas,
My true love sent to me
Four colly birds,
Three French hens,
Two turtle doves, and
A partridge in a pear tree.

The fifth day of Christmas,
My true love sent to me
Five gold rings,
Four colly birds,
Three French hens,
Two turtle doves, and
A partridge in a pear tree.";
        let output = Vec::with_capacity(SRC.len() + 16);
        let mut writer = DeflateWriter::new(output);
        let mut compr = Deflate::new(mode);
        compr.write_zlib_header(&mut writer);
        compr.compress(SRC, &mut writer);
        compr.compress_end(&mut writer);
        let output = writer.end();
        let mut uncompr = vec![0u8; SRC.len()];
        Inflate::uncompress(&output, &mut uncompr).unwrap();
        assert_eq!(SRC, uncompr.as_slice());
    }
    // Round-trip tests for each compressing mode.
    #[test]
    fn test_deflate_fast() {
        deflate_test(DeflateMode::Fast);
    }
    #[test]
    fn test_deflate_better() {
        deflate_test(DeflateMode::Better);
    }
    #[test]
    fn test_deflate_best() {
        deflate_test(DeflateMode::Best);
    }
1998 }