yazi/encode.rs

1//! RFC 1951 (DEFLATE) compression implementation with optional zlib (RFC 1950) framing.
2
3#![allow(clippy::needless_range_loop, clippy::new_without_default)]
4
5use super::{Adler32, Error, Format};
6use alloc::boxed::Box;
7use alloc::vec::Vec;
8use core::convert::TryInto;
9
10#[cfg(feature = "std")]
11use std::io::{self, Write};
12
13/// The level of compression: a trade-off between speed and size.
14#[derive(Copy, Clone, PartialEq, Debug)]
15pub enum CompressionLevel {
16    /// No compression. Outputs raw blocks.
17    None,
18    /// Fast compression.
19    BestSpeed,
20    /// Compromise between speed and size.
21    Default,
22    /// Slower compression for smaller size.
23    BestSize,
24    /// A specific compression level from 1 to 10; values above 10 are clamped to 10.
25    Specific(u8),
26}
27
28impl CompressionLevel {
29    fn to_raw(self) -> usize {
30        use CompressionLevel::*;
31        match self {
32            None => 0,
33            BestSpeed => 1,
34            Default => 6,
35            BestSize => 9,
36            Specific(level) => 10.min(level as usize),
37        }
38    }
39}
40
41/// Selects between various specialized compressor modes.
42#[derive(Copy, Clone)]
43pub enum CompressionStrategy {
44    /// No special constraints; let the compressor choose.
45    Default,
46    /// Run-length encoding only.
47    RLE,
48    /// Ignore matches of five bytes or fewer.
49    Filtered,
50    /// Static blocks only.
51    Static,
52    /// Huffman encoding only.
53    Huffman,
54}
55
56/// Stateful context for compression.
57///
58/// See the crate level [compression](index.html#compression) section
59/// for detailed usage.
60pub struct Encoder(DeflateContext);
61
62impl Encoder {
63    /// Creates a new deflate encoder. Note that creating an encoder with this
64    /// method allocates a large (200-300k) chunk of data on the stack and is
65    /// likely to cause a stack overflow if not carefully managed. See the [`boxed()`]
66    /// constructor for a safer method that allocates on the heap.
67    ///
68    /// [`boxed()`]: Self::boxed
69    pub fn new() -> Self {
70        let flags = make_flags(
71            false,
72            CompressionLevel::Default,
73            CompressionStrategy::Default,
74        );
75        Self(DeflateContext {
76            flags,
77            ready: true,
78            zlib: false,
79            level: CompressionLevel::Default,
80            strategy: CompressionStrategy::Default,
81            greedy_parsing: flags & GREEDY_PARSING != 0,
82            block_index: 0,
83            saved_match_dist: 0,
84            saved_match_len: 0,
85            saved_lit: 0,
86            saved_bit_buffer: 0,
87            saved_bits_in: 0,
88            adler32: Adler32::new(),
89            lt: LiteralLengthTree::new(),
90            dt: DistanceTree::new(),
91            pt: PrecodeTree::new(),
92            cb: CodeBuffer::new(),
93            dict: Dictionary::new(flags),
94        })
95    }
96
97    /// Creates a new deflate encoder on the heap.
98    pub fn boxed() -> Box<Self> {
99        let flags = make_flags(
100            false,
101            CompressionLevel::Default,
102            CompressionStrategy::Default,
103        );
104        Box::new(Self(DeflateContext {
105            flags,
106            ready: true,
107            zlib: false,
108            level: CompressionLevel::Default,
109            strategy: CompressionStrategy::Default,
110            greedy_parsing: flags & GREEDY_PARSING != 0,
111            block_index: 0,
112            saved_match_dist: 0,
113            saved_match_len: 0,
114            saved_lit: 0,
115            saved_bit_buffer: 0,
116            saved_bits_in: 0,
117            adler32: Adler32::new(),
118            lt: LiteralLengthTree::new(),
119            dt: DistanceTree::new(),
120            pt: PrecodeTree::new(),
121            cb: CodeBuffer::new(),
122            dict: Dictionary::new(flags),
123        }))
124    }
125
126    /// Sets the format of the output bitstream for the next usage of the
127    /// encoder.
128    pub fn set_format(&mut self, format: Format) {
129        self.0.reset(format == Format::Zlib);
130    }
131
132    /// Sets the compression level for the next usage of the encoder.
133    pub fn set_level(&mut self, level: CompressionLevel) {
134        let flags = make_flags(self.0.zlib, level, self.0.strategy);
135        self.0.flags = flags;
136        self.0.level = level;
137        self.0.greedy_parsing = flags & GREEDY_PARSING != 0;
138        self.0.dict.max_probes = Dictionary::probes_from_flags(flags);
139    }
140
141    /// Sets the compression strategy for the next usage of the encoder.
142    pub fn set_strategy(&mut self, strategy: CompressionStrategy) {
143        let flags = make_flags(self.0.zlib, self.0.level, strategy);
144        self.0.flags = flags;
145        self.0.strategy = strategy;
146        self.0.greedy_parsing = flags & GREEDY_PARSING != 0;
147        self.0.dict.max_probes = Dictionary::probes_from_flags(flags);
148    }
149
150    /// Creates an encoder stream that will write into the specified writer.
151    #[cfg(feature = "std")]
152    pub fn stream<'a, W: Write>(
153        &'a mut self,
154        writer: &'a mut W,
155    ) -> EncoderStream<'a, impl Sink + 'a> {
156        self.0.reset(self.0.zlib);
157        EncoderStream {
158            ctx: &mut self.0,
159            sink: WriterSink::new(writer),
160            finished: false,
161        }
162    }
163
164    /// Creates an encoder stream that will write into the specified vector.
165    /// The resulting stream will not clear the vector but will instead append
166    /// the compressed data.
167    pub fn stream_into_vec<'a>(
168        &'a mut self,
169        vec: &'a mut Vec<u8>,
170    ) -> EncoderStream<'a, impl Sink + 'a> {
171        self.0.reset(self.0.zlib);
172        EncoderStream {
173            ctx: &mut self.0,
174            sink: VecSink::new(vec),
175            finished: false,
176        }
177    }
178
179    /// Creates an encoder stream that will write into the specified buffer.
180    /// The stream will generate an overflow error if the buffer is not large
181    /// enough to contain the compressed data.
182    pub fn stream_into_buf<'a>(
183        &'a mut self,
184        buf: &'a mut [u8],
185    ) -> EncoderStream<'a, impl Sink + 'a> {
186        self.0.reset(self.0.zlib);
187        EncoderStream {
188            ctx: &mut self.0,
189            sink: BufSink::new(buf),
190            finished: false,
191        }
192    }
193}
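// Usage sketches (illustrative only; the function names here are arbitrary and not
// part of the crate's API). `Encoder::boxed()` is used so the roughly 200-300k
// context lives on the heap rather than the stack.
#[allow(dead_code)]
fn example_compress_into_vec(data: &[u8]) -> Result<Vec<u8>, Error> {
    let mut encoder = Encoder::boxed();
    encoder.set_format(Format::Zlib);
    encoder.set_level(CompressionLevel::BestSize);
    let mut output = Vec::new();
    // `stream_into_vec` appends to `output`; it does not clear it first.
    let mut stream = encoder.stream_into_vec(&mut output);
    stream.write(data)?;
    stream.finish()?;
    Ok(output)
}

#[allow(dead_code)]
fn example_compress_into_buf(data: &[u8], scratch: &mut [u8]) -> Result<usize, Error> {
    let mut encoder = Encoder::boxed();
    let mut stream = encoder.stream_into_buf(scratch);
    stream.write(data)?;
    // If `scratch` cannot hold the whole compressed stream, this fails with
    // `Error::Overflow`.
    let written = stream.finish()?;
    Ok(written as usize)
}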
194
195/// Compression stream combining an encoder context with an output.
196///
197/// See the crate level [compression](index.html#compression) section
198/// for detailed usage.
199pub struct EncoderStream<'a, S: Sink> {
200    ctx: &'a mut DeflateContext,
201    sink: S,
202    finished: bool,
203}
204
205impl<S: Sink> EncoderStream<'_, S> {
206    /// Writes the specified buffer to the stream, producing compressed data
207    /// in the output.
208    pub fn write(&mut self, buf: &[u8]) -> Result<(), Error> {
209        if self.finished {
210            return Err(Error::Finished);
211        }
212        self.ctx.deflate(buf, &mut self.sink, false)
213    }
214
215    /// Returns the number of compressed bytes that have been written to the
216    /// output.
217    pub fn compressed_size(&self) -> u64 {
218        self.sink.written()
219    }
220
221    /// Consumes the stream, flushing any input that may be buffered and any
222    /// remaining output. Returns the total number of compressed bytes written
223    /// to the output.
224    pub fn finish(mut self) -> Result<u64, Error> {
225        if self.finished {
226            return Err(Error::Finished);
227        }
228        self.finished = true;
229        self.ctx.deflate(&[], &mut self.sink, true)?;
230        self.ctx.flush_block(&mut self.sink, true)?;
231        Ok(self.sink.written())
232    }
233}
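// Illustrative sketch of incremental use: input is fed in arbitrary chunks and the
// running compressed size is observed before the stream is finished. The chunk
// size and function name are arbitrary.
#[allow(dead_code)]
fn example_chunked_writes(input: &[u8]) -> Result<(u64, Vec<u8>), Error> {
    let mut encoder = Encoder::boxed();
    let mut output = Vec::new();
    let mut stream = encoder.stream_into_vec(&mut output);
    for chunk in input.chunks(4096) {
        stream.write(chunk)?;
    }
    // Bytes already emitted; more may still be buffered until `finish` writes the
    // final block (and, for zlib output, the Adler-32 trailer).
    let _written_so_far = stream.compressed_size();
    let total = stream.finish()?;
    Ok((total, output))
}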
234
235impl<S: Sink> Drop for EncoderStream<'_, S> {
236    fn drop(&mut self) {
237        if !self.finished {
238            self.finished = true;
239            let _ = self.ctx.deflate(&[], &mut self.sink, true);
240            let _ = self.ctx.flush_block(&mut self.sink, true);
241        }
242    }
243}
244
245#[cfg(feature = "std")]
246impl<S: Sink> Write for EncoderStream<'_, S> {
247    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
248        match self.ctx.deflate(buf, &mut self.sink, false) {
249            Ok(_) => Ok(buf.len()),
250            Err(err) => match err {
251                Error::Io(err) => Err(err),
252                Error::Underflow | Error::Overflow => {
253                    Err(io::Error::from(io::ErrorKind::InvalidInput))
254                }
255                _ => Err(io::Error::from(io::ErrorKind::InvalidData)),
256            },
257        }
258    }
259
260    fn flush(&mut self) -> io::Result<()> {
261        Ok(())
262    }
263}
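// Illustrative sketch (requires the "std" feature): because `EncoderStream`
// implements `std::io::Write`, it can be driven by `io::copy`. Any `Write`
// implementor can serve as the underlying writer; the generic `W` and the function
// name are placeholders. Returns the number of compressed bytes written.
#[cfg(feature = "std")]
#[allow(dead_code)]
fn example_stream_to_writer<W: Write>(mut input: &[u8], writer: &mut W) -> io::Result<u64> {
    let mut encoder = Encoder::boxed();
    encoder.set_format(Format::Zlib);
    let mut stream = encoder.stream(writer);
    // `&[u8]` implements `Read`, so `io::copy` pumps the slice through `Write::write`.
    io::copy(&mut input, &mut stream)?;
    stream.finish().map_err(|err| match err {
        Error::Io(err) => err,
        _ => io::Error::from(io::ErrorKind::InvalidData),
    })
}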
264
265/// Compresses a buffer into a vector with the specified format and
266/// compression level.
267pub fn compress(buf: &[u8], format: Format, level: CompressionLevel) -> Result<Vec<u8>, Error> {
268    let mut encoder = Encoder::boxed();
269    encoder.set_format(format);
270    encoder.set_level(level);
271    let mut vec = Vec::new();
272    let mut stream = encoder.stream_into_vec(&mut vec);
273    stream.write(buf)?;
274    stream.finish()?;
275    Ok(vec)
276}
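// Illustrative sketch: one-shot compression of an in-memory buffer. With
// `Format::Zlib`, the output carries the 2-byte zlib header up front and the
// Adler-32 checksum at the end, both written by `flush_block`.
#[allow(dead_code)]
fn example_one_shot(data: &[u8]) -> Result<Vec<u8>, Error> {
    compress(data, Format::Zlib, CompressionLevel::Default)
}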
277
278struct DeflateContext {
279    flags: u32,
280    ready: bool,
281    zlib: bool,
282    level: CompressionLevel,
283    strategy: CompressionStrategy,
284    greedy_parsing: bool,
285    block_index: u32,
286    saved_match_dist: usize,
287    saved_match_len: usize,
288    saved_lit: u8,
289    saved_bit_buffer: u32,
290    saved_bits_in: u32,
291    adler32: Adler32,
292    lt: LiteralLengthTree,
293    dt: DistanceTree,
294    pt: PrecodeTree,
295    cb: CodeBuffer,
296    dict: Dictionary,
297}
298
299impl DeflateContext {
300    fn deflate<S: Sink>(&mut self, buf: &[u8], sink: &mut S, is_last: bool) -> Result<(), Error> {
301        if !is_last && buf.is_empty() {
302            return Ok(());
303        }
304        self.deflate_inner(buf, sink, is_last)?;
305        if self.flags & WRITE_ZLIB_HEADER != 0 {
306            self.adler32.update(buf);
307        }
308        Ok(())
309    }
310
311    fn deflate_inner<S: Sink>(
312        &mut self,
313        data: &[u8],
314        sink: &mut S,
315        is_last: bool,
316    ) -> Result<(), Error> {
317        const DICT_MASK: usize = DICTIONARY_SIZE - 1;
318        self.ready = false;
319        let mut src_pos = 0;
320        let mut lookahead_size = self.dict.lookahead_size;
321        let mut lookahead_pos = self.dict.lookahead_pos;
322        let mut saved_lit = self.saved_lit;
323        let mut saved_match_dist = self.saved_match_dist;
324        let mut saved_match_len = self.saved_match_len;
325        while src_pos < data.len() || (is_last && lookahead_size != 0) {
326            let src_buf_left = data.len() - src_pos;
327            let num_bytes_to_process = src_buf_left.min(MAX_MATCH_LEN - lookahead_size);
328            if lookahead_size + self.dict.len >= MIN_MATCH_LEN - 1 && num_bytes_to_process > 0 {
329                let dict = &mut self.dict;
330                let mut dst_pos = (lookahead_pos + lookahead_size) & DICT_MASK;
331                let mut ins_pos = lookahead_pos + lookahead_size - 2;
332                let mut hash = (u32::from(dict.dict[ins_pos & DICT_MASK]) << HASH_SHIFT)
333                    ^ u32::from(dict.dict[(ins_pos + 1) & DICT_MASK]);
334                lookahead_size += num_bytes_to_process;
335                for &c in &data[src_pos..src_pos + num_bytes_to_process] {
336                    dict.dict[dst_pos] = c;
337                    if dst_pos < MAX_MATCH_LEN - 1 {
338                        dict.dict[DICTIONARY_SIZE + dst_pos] = c;
339                    }
340                    hash = ((hash << HASH_SHIFT) ^ u32::from(c)) & (HASH_SIZE as u32 - 1);
341                    dict.next[ins_pos & DICT_MASK] = dict.hash[hash as usize];
342                    dict.hash[hash as usize] = ins_pos as u16;
343                    dst_pos = (dst_pos + 1) & DICT_MASK;
344                    ins_pos += 1;
345                }
346                src_pos += num_bytes_to_process;
347            } else {
348                let dict = &mut self.dict;
349                for &c in &data[src_pos..src_pos + num_bytes_to_process] {
350                    let dst_pos = (lookahead_pos + lookahead_size) & DICT_MASK;
351                    dict.dict[dst_pos] = c;
352                    if dst_pos < MAX_MATCH_LEN - 1 {
353                        dict.dict[DICTIONARY_SIZE + dst_pos] = c;
354                    }
355                    lookahead_size += 1;
356                    if lookahead_size + dict.len >= MIN_MATCH_LEN {
357                        let ins_pos = lookahead_pos + lookahead_size - 3;
358                        let hash = ((u32::from(dict.dict[ins_pos & DICT_MASK])
359                            << (HASH_SHIFT * 2))
360                            ^ ((u32::from(dict.dict[(ins_pos + 1) & DICT_MASK]) << HASH_SHIFT)
361                                ^ u32::from(c)))
362                            & (HASH_SIZE as u32 - 1);
363                        dict.next[ins_pos & DICT_MASK] = dict.hash[hash as usize];
364                        dict.hash[hash as usize] = ins_pos as u16;
365                    }
366                }
367                src_pos += num_bytes_to_process;
368            }
369            self.dict.len = self.dict.len.min(DICTIONARY_SIZE - lookahead_size);
370            if lookahead_size < MAX_MATCH_LEN && !is_last {
371                break;
372            }
373            let mut len_to_move = 1;
374            let mut cur_match_dist = 0;
375            let mut cur_match_len = if saved_match_len != 0 {
376                saved_match_len
377            } else {
378                MIN_MATCH_LEN - 1
379            };
380            let cur_pos = lookahead_pos & DICT_MASK;
381            if self.flags & (RLE_MATCHES | FORCE_RAW) != 0 {
382                if self.dict.len != 0 && self.flags & FORCE_RAW == 0 {
383                    let c = self.dict.dict[cur_pos.wrapping_sub(1) & DICT_MASK];
384                    cur_match_len = self.dict.dict[cur_pos..(cur_pos + lookahead_size)]
385                        .iter()
386                        .take_while(|&x| *x == c)
387                        .count();
388                    if cur_match_len < MIN_MATCH_LEN {
389                        cur_match_len = 0
390                    } else {
391                        cur_match_dist = 1
392                    }
393                }
394            } else {
395                let dist_len = self.dict.find_match(
396                    lookahead_pos,
397                    self.dict.len,
398                    lookahead_size,
399                    cur_match_dist,
400                    cur_match_len,
401                );
402                cur_match_dist = dist_len.0;
403                cur_match_len = dist_len.1;
404            }
405            let far_and_small = cur_match_len == MIN_MATCH_LEN && cur_match_dist >= 8 * 1024;
406            let filter_small = self.flags & FILTER_MATCHES != 0 && cur_match_len <= 5;
407            if far_and_small || filter_small || cur_pos == cur_match_dist {
408                cur_match_dist = 0;
409                cur_match_len = 0;
410            }
411            if saved_match_len != 0 {
412                if cur_match_len > saved_match_len {
413                    self.cb.push_literal(saved_lit, &mut self.lt);
414                    if cur_match_len >= 128 {
415                        self.cb.push_match(
416                            cur_match_len,
417                            cur_match_dist,
418                            &mut self.lt,
419                            &mut self.dt,
420                        );
421                        saved_match_len = 0;
422                        len_to_move = cur_match_len;
423                    } else {
424                        saved_lit = self.dict.get(cur_pos);
425                        saved_match_dist = cur_match_dist;
426                        saved_match_len = cur_match_len;
427                    }
428                } else {
429                    self.cb.push_match(
430                        saved_match_len,
431                        saved_match_dist,
432                        &mut self.lt,
433                        &mut self.dt,
434                    );
435                    len_to_move = saved_match_len - 1;
436                    saved_match_len = 0;
437                }
438            } else if cur_match_dist == 0 {
439                self.cb.push_literal(self.dict.get(cur_pos), &mut self.lt);
440            } else if self.greedy_parsing || (self.flags & RLE_MATCHES != 0) || cur_match_len >= 128
441            {
442                self.cb
443                    .push_match(cur_match_len, cur_match_dist, &mut self.lt, &mut self.dt);
444                len_to_move = cur_match_len;
445            } else {
446                saved_lit = self.dict.get(cur_pos);
447                saved_match_dist = cur_match_dist;
448                saved_match_len = cur_match_len;
449            }
450            lookahead_pos += len_to_move;
451            assert!(lookahead_size >= len_to_move);
452            lookahead_size -= len_to_move;
453            self.dict.len = (self.dict.len + len_to_move).min(DICTIONARY_SIZE);
454            let lz_buf_tight = self.cb.pos > CODE_BUFFER_SIZE - 8;
455            let raw = self.flags & FORCE_RAW != 0;
456            let fat = ((self.cb.pos * 115) >> 7) >= self.cb.total_bytes;
457            let fat_or_raw = (self.cb.total_bytes > 31 * 1024) && (fat || raw);
458            if lz_buf_tight || fat_or_raw {
459                self.dict.lookahead_size = lookahead_size;
460                self.dict.lookahead_pos = lookahead_pos;
461                self.flush_block(sink, false)?;
462            }
463        }
464        self.dict.lookahead_size = lookahead_size;
465        self.dict.lookahead_pos = lookahead_pos;
466        self.saved_lit = saved_lit;
467        self.saved_match_dist = saved_match_dist;
468        self.saved_match_len = saved_match_len;
469        Ok(())
470    }
471
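    // `flush_block` finishes the block that `deflate_inner` has accumulated: it
    // emits the zlib header ahead of the first block when requested, codes the
    // block with dynamic Huffman tables (or static tables when forced or when the
    // block is very small), falls back to a stored block if raw output is forced
    // or if the coded block would be no smaller than the raw bytes, and appends
    // the Adler-32 trailer after the final block of a zlib stream.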
472    fn flush_block<S: Sink>(&mut self, sink: &mut S, finish: bool) -> Result<(), Error> {
473        sink.set_bit_buffer(self.saved_bit_buffer, self.saved_bits_in);
474        let mut snapshot;
475        let use_raw_block = (self.flags & FORCE_RAW != 0)
476            && (self.dict.lookahead_pos - self.dict.code_buffer_offset) <= self.dict.len;
477        self.cb.init_flag();
478        if self.flags & WRITE_ZLIB_HEADER != 0 && self.block_index == 0 {
479            let header = make_zlib_header(self.flags);
480            sink.put_bits(header[0].into(), 8)?;
481            sink.put_bits(header[1].into(), 8)?;
482        }
483        sink.put_bits(finish as u32, 1)?;
484        snapshot = sink.snapshot();
485        let comp_success = if !use_raw_block {
486            let use_static = (self.flags & FORCE_STATIC != 0) || (self.cb.total_bytes < 48);
487            self.emit_block(sink, use_static)?;
488            true
489        } else {
490            false
491        };
492        let end_pos = sink.snapshot().pos;
493        let expanded = (self.cb.total_bytes > 32)
494            && (end_pos - snapshot.pos + 1 >= self.cb.total_bytes)
495            && (self.dict.lookahead_pos - self.dict.code_buffer_offset <= self.dict.len);
496        if use_raw_block || expanded {
497            sink.restore(&snapshot);
498            sink.put_bits(0, 2)?;
499            sink.pad()?;
500            sink.put_bits(self.cb.total_bytes as u32 & 0xFFFF, 16)?;
501            sink.put_bits(!self.cb.total_bytes as u32 & 0xFFFF, 16)?;
502            for i in 0..self.cb.total_bytes {
503                let pos = (self.dict.code_buffer_offset + i) & DICTIONARY_SIZE_MASK;
504                sink.put_bits(u32::from(self.dict.dict[pos]), 8)?;
505            }
506        } else if !comp_success {
507            sink.restore(&snapshot);
508            self.emit_block(sink, true)?;
509        }
510        if finish {
511            sink.pad()?;
512            if self.flags & WRITE_ZLIB_HEADER != 0 {
513                let mut adler = self.adler32.finish();
514                for _ in 0..4 {
515                    sink.put_bits((adler >> 24) & 0xFF, 8)?;
516                    adler <<= 8;
517                }
518            }
519        }
520        self.lt.reset();
521        self.dt.reset();
522        self.cb.pos = 1;
523        self.cb.flags_offset = 0;
524        self.cb.flags_left = 8;
525        self.dict.code_buffer_offset += self.cb.total_bytes;
526        self.cb.total_bytes = 0;
527        self.block_index += 1;
528        snapshot = sink.snapshot();
529        self.saved_bit_buffer = snapshot.bit_buffer;
530        self.saved_bits_in = snapshot.bits_in;
531        sink.flush()
532    }
533
534    fn reset(&mut self, zlib: bool) {
535        if self.ready && zlib == self.zlib {
536            return;
537        }
538        let flags = make_flags(zlib, self.level, self.strategy);
539        self.zlib = zlib;
540        self.flags = flags;
541        self.greedy_parsing = flags & GREEDY_PARSING != 0;
542        self.block_index = 0;
543        self.saved_lit = 0;
544        self.saved_match_dist = 0;
545        self.saved_match_len = 0;
546        self.saved_bit_buffer = 0;
547        self.saved_bits_in = 0;
548        self.dict.code_buffer_offset = 0;
549        self.dict.len = 0;
550        self.dict.lookahead_pos = 0;
551        self.dict.lookahead_size = 0;
552        self.dict.max_probes = Dictionary::probes_from_flags(flags);
553        self.cb.reset();
554        if !self.ready {
555            self.lt.reset();
556            self.dt.reset();
557            self.pt.reset();
558        }
559        self.ready = true;
560        self.adler32 = Adler32::new();
561    }
562}
563
564impl DeflateContext {
565    fn start_dynamic_block<S: Sink>(&mut self, sink: &mut S) -> Result<(), Error> {
566        const CODE_SIZES_LEN: usize = LITERAL_LENGTH_TREE_SIZE + DISTANCE_TREE_SIZE;
567        let mut code_sizes_to_pack = [0u8; CODE_SIZES_LEN];
568        let mut packed = [0u8; CODE_SIZES_LEN];
569        self.lt.counts[256] = 1;
570        self.lt.optimize(false);
571        self.dt.optimize(false);
572        let mut num_lit_codes = 286;
573        while num_lit_codes > 257 {
574            if self.lt.code_sizes[num_lit_codes - 1] != 0 {
575                break;
576            }
577            num_lit_codes -= 1;
578        }
579        let mut num_dist_codes = 30;
580        while num_dist_codes > 1 {
581            if self.dt.code_sizes[num_dist_codes - 1] != 0 {
582                break;
583            }
584            num_dist_codes -= 1;
585        }
586        code_sizes_to_pack[0..num_lit_codes].copy_from_slice(&self.lt.code_sizes[0..num_lit_codes]);
587        code_sizes_to_pack[num_lit_codes..num_lit_codes + num_dist_codes]
588            .copy_from_slice(&self.dt.code_sizes[0..num_dist_codes]);
589        let total_code_sizes_to_pack = num_lit_codes + num_dist_codes;
590        let mut num_packed = 0;
591        for i in 0..PRECODE_TREE_SIZE {
592            self.pt.counts[i] = 0;
593        }
594        let mut rle = Rle::new();
595        for i in 0..total_code_sizes_to_pack {
596            let code_size = code_sizes_to_pack[i] as usize;
597            if code_size == 0 {
598                rle.prev(&mut packed, &mut num_packed, &mut self.pt);
599                rle.z_count += 1;
600                if rle.z_count == 138 {
601                    rle.zero(&mut packed, &mut num_packed, &mut self.pt);
602                }
603            } else {
604                rle.zero(&mut packed, &mut num_packed, &mut self.pt);
605                if code_size != rle.prev {
606                    rle.prev(&mut packed, &mut num_packed, &mut self.pt);
607                    self.pt.counts[code_size] += 1;
608                    packed[num_packed] = code_size as u8;
609                    num_packed += 1;
610                } else {
611                    rle.repeat_count += 1;
612                    if rle.repeat_count == 6 {
613                        rle.prev(&mut packed, &mut num_packed, &mut self.pt);
614                    }
615                }
616            }
617            rle.prev = code_size;
618        }
619        if rle.repeat_count != 0 {
620            rle.prev(&mut packed, &mut num_packed, &mut self.pt);
621        } else {
622            rle.zero(&mut packed, &mut num_packed, &mut self.pt);
623        }
624        self.pt.optimize();
625        sink.put_bits(2, 2)?;
626        sink.put_bits(num_lit_codes as u32 - 257, 5)?;
627        sink.put_bits(num_dist_codes as u32 - 1, 5)?;
628        let mut num_bit_lengths = 0;
629        for i in (0..=18).rev() {
630            if self.pt.code_sizes[PRECODE_SWIZZLE[i] as usize] != 0 {
631                num_bit_lengths = i;
632                break;
633            }
634        }
635        num_bit_lengths = 4.max(num_bit_lengths + 1);
636        sink.put_bits(num_bit_lengths as u32 - 4, 4)?;
637        for swizzle in &PRECODE_SWIZZLE[..num_bit_lengths] {
638            sink.put_bits(self.pt.code_sizes[*swizzle as usize] as u32, 3)?;
639        }
640        let mut i = 0;
641        while i < num_packed {
642            let code = packed[i] as usize;
643            i += 1;
644            sink.put_bits(self.pt.codes[code] as u32, self.pt.code_sizes[code] as u32)?;
645            if code >= 16 {
646                sink.put_bits(packed[i] as u32, [2, 3, 7][code - 16])?;
647                i += 1;
648            }
649        }
650        Ok(())
651    }
652
653    fn start_static_block<S: Sink>(&mut self, sink: &mut S) -> Result<(), Error> {
654        let lengths = &mut self.lt.code_sizes;
655        lengths[0..144].iter_mut().for_each(|p| *p = 8);
656        lengths[144..256].iter_mut().for_each(|p| *p = 9);
657        lengths[256..280].iter_mut().for_each(|p| *p = 7);
658        lengths[280..288].iter_mut().for_each(|p| *p = 8);
659        self.dt.code_sizes = [5; 32];
660        self.lt.optimize(true);
661        self.dt.optimize(true);
662        sink.put_bits(1, 2)
663    }
664
665    fn emit_block<S: Sink>(&mut self, sink: &mut S, is_static: bool) -> Result<(), Error> {
666        if is_static {
667            self.start_static_block(sink)?;
668        } else {
669            self.start_dynamic_block(sink)?;
670        }
671        self.cb.emit(sink, &self.lt, &self.dt)
672    }
673}
674
675struct Rle {
676    prev: usize,
677    repeat_count: usize,
678    z_count: usize,
679}
680
681impl Rle {
682    fn new() -> Self {
683        Self {
684            prev: 0xFF,
685            repeat_count: 0,
686            z_count: 0,
687        }
688    }
689
690    #[inline(always)]
691    fn prev(&mut self, code_sizes: &mut [u8], count: &mut usize, pt: &mut PrecodeTree) {
692        if self.repeat_count == 0 {
693            return;
694        }
695        if self.repeat_count < 3 {
696            pt.counts[self.prev] += self.repeat_count as u16;
697            while self.repeat_count != 0 {
698                code_sizes[*count] = self.prev as u8;
699                *count += 1;
700                self.repeat_count -= 1;
701            }
702        } else {
703            pt.counts[16] += 1;
704            code_sizes[*count] = 16;
705            *count += 1;
706            code_sizes[*count] = (self.repeat_count - 3) as u8;
707            *count += 1;
708        }
709        self.repeat_count = 0;
710    }
711
712    #[inline(always)]
713    fn zero(&mut self, code_sizes: &mut [u8], count: &mut usize, pt: &mut PrecodeTree) {
714        if self.z_count == 0 {
715            return;
716        }
717        if self.z_count < 3 {
718            pt.counts[0] += self.z_count as u16;
719            while self.z_count != 0 {
720                code_sizes[*count] = 0;
721                *count += 1;
722                self.z_count -= 1;
723            }
724        } else if self.z_count <= 10 {
725            pt.counts[17] += 1;
726            code_sizes[*count] = 17;
727            *count += 1;
728            code_sizes[*count] = (self.z_count - 3) as u8;
729            *count += 1;
730        } else {
731            pt.counts[18] += 1;
732            code_sizes[*count] = 18;
733            *count += 1;
734            code_sizes[*count] = (self.z_count - 11) as u8;
735            *count += 1;
736        }
737        self.z_count = 0;
738    }
739}
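// The `Rle` helper above collapses runs in the code-length array using the three
// special code-length symbols from RFC 1951, section 3.2.7: 16 repeats the previous
// length 3-6 times (2 extra bits), 17 encodes a run of 3-10 zero lengths (3 extra
// bits), and 18 encodes a run of 11-138 zero lengths (7 extra bits). Shorter runs
// are emitted as plain lengths. The matching 2/3/7 extra-bit widths appear in
// `start_dynamic_block` when the packed lengths are written out.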
740
741struct LiteralLengthTree {
742    pub counts: [u16; LITERAL_LENGTH_TREE_SIZE],
743    pub codes: [u16; LITERAL_LENGTH_TREE_SIZE],
744    pub code_sizes: [u8; LITERAL_LENGTH_TREE_SIZE],
745}
746
747impl LiteralLengthTree {
748    #[inline(always)]
749    fn new() -> Self {
750        Self {
751            counts: [0; LITERAL_LENGTH_TREE_SIZE],
752            codes: [0; LITERAL_LENGTH_TREE_SIZE],
753            code_sizes: [0; LITERAL_LENGTH_TREE_SIZE],
754        }
755    }
756
757    fn reset(&mut self) {
758        self.counts.iter_mut().for_each(|p| *p = 0);
759    }
760
761    fn optimize(&mut self, is_static: bool) {
762        huffman::optimize(
763            &mut self.counts,
764            &mut self.codes,
765            &mut self.code_sizes,
766            15,
767            is_static,
768        );
769    }
770}
771
772struct DistanceTree {
773    pub counts: [u16; DISTANCE_TREE_SIZE],
774    pub codes: [u16; DISTANCE_TREE_SIZE],
775    pub code_sizes: [u8; DISTANCE_TREE_SIZE],
776}
777
778impl DistanceTree {
779    #[inline(always)]
780    fn new() -> Self {
781        Self {
782            counts: [0; DISTANCE_TREE_SIZE],
783            codes: [0; DISTANCE_TREE_SIZE],
784            code_sizes: [0; DISTANCE_TREE_SIZE],
785        }
786    }
787
788    fn reset(&mut self) {
789        self.counts.iter_mut().for_each(|p| *p = 0);
790    }
791
792    fn optimize(&mut self, is_static: bool) {
793        huffman::optimize(
794            &mut self.counts,
795            &mut self.codes,
796            &mut self.code_sizes,
797            15,
798            is_static,
799        );
800    }
801}
802
803struct PrecodeTree {
804    pub counts: [u16; PRECODE_TREE_SIZE],
805    pub codes: [u16; PRECODE_TREE_SIZE],
806    pub code_sizes: [u8; PRECODE_TREE_SIZE],
807}
808
809impl PrecodeTree {
810    fn new() -> Self {
811        Self {
812            counts: [0; PRECODE_TREE_SIZE],
813            codes: [0; PRECODE_TREE_SIZE],
814            code_sizes: [0; PRECODE_TREE_SIZE],
815        }
816    }
817
818    fn reset(&mut self) {
819        self.counts.iter_mut().for_each(|p| *p = 0);
820    }
821
822    fn optimize(&mut self) {
823        huffman::optimize(
824            &mut self.counts,
825            &mut self.codes,
826            &mut self.code_sizes,
827            7,
828            false,
829        );
830    }
831}
832
833mod huffman {
834    const MAX_HUFF_SYMBOLS: usize = 288;
835    const MAX_SUPPORTED_HUFF_CODE_SIZE: usize = 32;
836
837    #[derive(Copy, Clone, Default)]
838    struct SymbolFrequency {
839        key: u16,
840        index: u16,
841    }
842
843    pub fn optimize(
844        counts: &mut [u16],
845        codes: &mut [u16],
846        code_sizes: &mut [u8],
847        size_limit: usize,
848        is_static: bool,
849    ) {
850        let mut num_codes = [0i32; 1 + MAX_SUPPORTED_HUFF_CODE_SIZE];
851        let mut next_code = [0u32; 1 + MAX_SUPPORTED_HUFF_CODE_SIZE];
852        let len = counts.len();
853        if is_static {
854            for i in 0..len {
855                num_codes[code_sizes[i] as usize] += 1;
856            }
857        } else {
858            let mut syms0 = [SymbolFrequency::default(); MAX_HUFF_SYMBOLS];
859            let mut syms1 = [SymbolFrequency::default(); MAX_HUFF_SYMBOLS];
860            let mut used = 0;
861            for i in 0..len {
862                let count = counts[i];
863                if count != 0 {
864                    let sym = &mut syms0[used];
865                    used += 1;
866                    sym.key = count;
867                    sym.index = i as u16;
868                }
869            }
870            let syms = sort_symbols(&mut syms0[..used], &mut syms1[..used]);
871            minimum_redundancy(syms);
872            for s in syms.iter() {
873                num_codes[s.key as usize] += 1;
874            }
875            enforce_size_limit(&mut num_codes, used, size_limit);
876            codes.iter_mut().for_each(|p| *p = 0);
877            code_sizes.iter_mut().for_each(|p| *p = 0);
878            let mut last = used;
879            for i in 1..=size_limit {
880                let first = last - num_codes[i] as usize;
881                for sym in &syms[first..last] {
882                    code_sizes[sym.index as usize] = i as u8;
883                }
884                last = first;
885            }
886        }
887        next_code[1] = 0;
888        let mut j = 0;
889        for i in 2..=size_limit {
890            j = (j + num_codes[i - 1]) << 1;
891            next_code[i] = j as u32;
892        }
893        for i in 0..len {
894            let code_size = code_sizes[i] as usize;
895            if code_size == 0 {
896                continue;
897            }
898            let mut code = next_code[code_size];
899            let mut rev_code = 0;
900            next_code[code_size] += 1;
901            for _ in 0..code_size {
902                rev_code = (rev_code << 1) | (code & 1);
903                code >>= 1;
904            }
905            codes[i] = rev_code as u16;
906        }
907    }
908
909    fn sort_symbols<'a>(
910        syms0: &'a mut [SymbolFrequency],
911        syms1: &'a mut [SymbolFrequency],
912    ) -> &'a mut [SymbolFrequency] {
913        let mut hist = [[0u32; 256]; 2];
914        for freq in syms0.iter() {
915            let key = freq.key as usize;
916            hist[0][key & 0xFF] += 1;
917            hist[1][(key >> 8) & 0xFF] += 1;
918        }
919        let mut passes = 2;
920        if syms0.len() == hist[1][0] as usize {
921            passes -= 1;
922        }
923        let mut offsets = [0u32; 256];
924        let mut cur_syms = syms0;
925        let mut new_syms = syms1;
926        for pass in 0..passes {
927            let mut offset = 0;
928            for i in 0..256 {
929                offsets[i] = offset;
930                offset += hist[pass][i];
931            }
932            for sym in cur_syms.iter() {
933                let j = ((sym.key >> (pass * 8)) & 0xFF) as usize;
934                new_syms[offsets[j] as usize] = *sym;
935                offsets[j] += 1;
936            }
937            core::mem::swap(&mut cur_syms, &mut new_syms);
938        }
939        cur_syms
940    }
941
942    fn minimum_redundancy(a: &mut [SymbolFrequency]) {
943        let n = a.len();
944        if n == 0 {
945            return;
946        } else if n == 1 {
947            a[0].key = 1;
948            return;
949        }
950        a[0].key += a[1].key;
951        let mut root = 0;
952        let mut leaf = 2;
953        for next in 1..n - 1 {
954            if leaf >= n || a[root].key < a[leaf].key {
955                a[next].key = a[root].key;
956                a[root].key = next as u16;
957                root += 1;
958            } else {
959                a[next].key = a[leaf].key;
960                leaf += 1;
961            }
962            if leaf >= n || (root < next && a[root].key < a[leaf].key) {
963                a[next].key += a[root].key;
964                a[root].key = next as u16;
965                root += 1;
966            } else {
967                a[next].key += a[leaf].key;
968                leaf += 1;
969            }
970        }
971        a[n - 2].key = 0;
972        for next in (0..n - 2).rev() {
973            a[next].key = a[a[next].key as usize].key + 1;
974        }
975        let mut avail = 1isize;
976        let mut used = 0isize;
977        let mut depth = 0;
978        let mut root = n as isize - 2;
979        let mut next = n as isize - 1;
980        while avail > 0 {
981            while root >= 0 && a[root as usize].key == depth {
982                used += 1;
983                root -= 1;
984            }
985            while avail > used {
986                a[next as usize].key = depth;
987                next -= 1;
988                avail -= 1;
989            }
990            avail = 2 * used;
991            depth += 1;
992            used = 0;
993        }
994    }
995
996    fn enforce_size_limit(num_codes: &mut [i32], len: usize, size_limit: usize) {
997        if len <= 1 {
998            return;
999        }
1000        for i in size_limit + 1..=MAX_SUPPORTED_HUFF_CODE_SIZE {
1001            num_codes[size_limit] += num_codes[i];
1002        }
1003        let mut total = 0;
1004        for i in (1..=size_limit).rev() {
1005            total += (num_codes[i] as u32) << (size_limit - i);
1006        }
1007        while total != (1 << size_limit) {
1008            num_codes[size_limit] -= 1;
1009            for i in (1..size_limit).rev() {
1010                if num_codes[i] != 0 {
1011                    num_codes[i] -= 1;
1012                    num_codes[i + 1] += 2;
1013                    break;
1014                }
1015            }
1016            total -= 1;
1017        }
1018    }
1019}
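// Illustrative sketch (the function name is arbitrary) of the canonical code
// assignment performed at the end of `huffman::optimize`, using the worked example
// from RFC 1951, section 3.2.2: code lengths (3, 3, 3, 3, 3, 2, 4, 4) produce the
// codes asserted below. `optimize` additionally bit-reverses each code so it can
// be emitted LSB-first by the bit writer.
#[allow(dead_code)]
fn example_canonical_codes() -> [u16; 8] {
    let lengths = [3u8, 3, 3, 3, 3, 2, 4, 4];
    let max_len = 4usize;
    // Count how many symbols use each length, then derive the first code per length.
    let mut bl_count = [0u16; 5];
    for &len in &lengths {
        bl_count[len as usize] += 1;
    }
    let mut next_code = [0u16; 5];
    let mut code = 0u16;
    for bits in 1..=max_len {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }
    // Hand out consecutive codes within each length, in symbol order.
    let mut codes = [0u16; 8];
    for (i, &len) in lengths.iter().enumerate() {
        codes[i] = next_code[len as usize];
        next_code[len as usize] += 1;
    }
    // Expected per RFC 1951: 010, 011, 100, 101, 110, 00, 1110, 1111.
    debug_assert_eq!(codes, [0b010, 0b011, 0b100, 0b101, 0b110, 0b00, 0b1110, 0b1111]);
    codes
}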
1020
1021struct CodeBuffer {
1022    pub buffer: [u8; CODE_BUFFER_SIZE],
1023    pub pos: usize,
1024    pub flags_offset: usize,
1025    pub flags_left: usize,
1026    pub total_bytes: usize,
1027}
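// Record layout used by `CodeBuffer` (see `push_literal` and `push_match` below):
// the buffer interleaves one flag byte per eight records with the records
// themselves. A clear flag bit marks a literal record (1 byte), a set bit marks a
// match record (3 bytes: length - MIN_MATCH_LEN, then distance - 1 as a
// little-endian u16). `emit` replays the records against the current Huffman
// tables, and `total_bytes` tracks how many input bytes the records cover.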
1028
1029impl CodeBuffer {
1030    #[inline(always)]
1031    fn new() -> Self {
1032        Self {
1033            buffer: [0u8; CODE_BUFFER_SIZE],
1034            pos: 1,
1035            flags_offset: 0,
1036            flags_left: 8,
1037            total_bytes: 0,
1038        }
1039    }
1040
1041    fn reset(&mut self) {
1042        self.pos = 1;
1043        self.flags_offset = 0;
1044        self.flags_left = 8;
1045        self.total_bytes = 0;
1046    }
1047
1048    fn init_flag(&mut self) {
1049        if self.flags_left == 8 {
1050            self.buffer[self.flags_offset] = 0;
1051            self.pos -= 1;
1052        } else {
1053            self.buffer[self.flags_offset] >>= self.flags_left;
1054        }
1055    }
1056
1057    #[inline(always)]
1058    fn push_literal(&mut self, lit: u8, lt: &mut LiteralLengthTree) {
1059        self.buffer[self.pos] = lit;
1060        self.pos += 1;
1061        self.total_bytes += 1;
1062        self.buffer[self.flags_offset] >>= 1;
1063        self.flags_left -= 1;
1064        if self.flags_left == 0 {
1065            self.flags_left = 8;
1066            self.flags_offset = self.pos;
1067            self.pos += 1;
1068        }
1069        lt.counts[lit as usize] += 1;
1070    }
1071
1072    #[inline(always)]
1073    fn push_match(
1074        &mut self,
1075        len: usize,
1076        mut dist: usize,
1077        lt: &mut LiteralLengthTree,
1078        dt: &mut DistanceTree,
1079    ) {
1080        self.total_bytes += len;
1081        self.buffer[self.pos] = (len - MIN_MATCH_LEN) as u8;
1082        dist -= 1;
1083        self.buffer[self.pos + 1] = (dist & 0xFF) as u8;
1084        self.buffer[self.pos + 2] = (dist >> 8) as u8;
1085        self.pos += 3;
1086        self.buffer[self.flags_offset] = (self.buffer[self.flags_offset] >> 1) | 0x80;
1087        self.flags_left -= 1;
1088        if self.flags_left == 0 {
1089            self.flags_left = 8;
1090            self.flags_offset = self.pos;
1091            self.pos += 1;
1092        }
1093        let s = if dist < 512 {
1094            SMALL_DIST_SYM[dist & 511] as usize
1095        } else {
1096            LARGE_DIST_SYM[(dist >> 8) & 127] as usize
1097        };
1098        dt.counts[s] += 1;
1099        if len >= MIN_MATCH_LEN {
1100            lt.counts[LEN_SYM[len - MIN_MATCH_LEN] as usize] += 1;
1101        }
1102    }
1103
1104    fn emit<S: Sink>(
1105        &self,
1106        sink: &mut S,
1107        lt: &LiteralLengthTree,
1108        dt: &DistanceTree,
1109    ) -> Result<(), Error> {
1110        let mut flags = 1;
1111        let snap = sink.snapshot();
1112        let mut bits = FastBits::new(snap.bit_buffer, snap.bits_in);
1113        let mut i = 0;
1114        while i < self.pos {
1115            if flags == 1 {
1116                flags = self.buffer[i] as u32 | 0x100;
1117                i += 1;
1118            }
1119            if flags & 1 != 0 {
1120                if bits.bits_in > 16 {
1121                    bits.flush(sink)?;
1122                }
1123                let match_len = self.buffer[i] as usize;
1124                let match_dist = self.buffer[i + 1] as usize | ((self.buffer[i + 2] as usize) << 8);
1125                i += 3;
1126                let i0 = LEN_SYM[match_len & 0xFF] as usize;
1127                bits.put(lt.codes[i0] as u32, lt.code_sizes[i0] as u32);
1128                let extra = LEN_EXTRA[match_len & 0xFF] as usize;
1129                bits.put(match_len as u32 & BITMASKS[extra], extra as u32);
1130                let (sym, extra_bits) = if match_dist < 512 {
1131                    (
1132                        SMALL_DIST_SYM[match_dist & 511] as usize,
1133                        SMALL_DIST_EXTRA[match_dist & 511] as usize,
1134                    )
1135                } else {
1136                    (
1137                        LARGE_DIST_SYM[(match_dist >> 8) & 127] as usize,
1138                        LARGE_DIST_EXTRA[(match_dist >> 8) & 127] as usize,
1139                    )
1140                };
1141                bits.put(dt.codes[sym] as u32, dt.code_sizes[sym] as u32);
1142                bits.put(match_dist as u32 & BITMASKS[extra_bits], extra_bits as u32);
1143            } else {
1144                let lit = self.buffer[i] as usize;
1145                i += 1;
1146                if bits.bits_in > 48 {
1147                    bits.flush(sink)?;
1148                }
1149                bits.put(
1150                    lt.codes[lit & 0xFF] as u32,
1151                    lt.code_sizes[lit & 0xFF] as u32,
1152                );
1153            }
1154            flags >>= 1;
1155        }
1156        bits.flush(sink)?;
1157        sink.set_bit_buffer(bits.bit_buffer as u32, bits.bits_in);
1158        sink.put_bits(lt.codes[256] as u32, lt.code_sizes[256] as u32)
1159    }
1160}
1161
1162struct FastBits {
1163    bit_buffer: u64,
1164    bits_in: u32,
1165    buf: [u8; 8],
1166}
1167
1168impl FastBits {
1169    pub fn new(bit_buffer: u32, bits_in: u32) -> Self {
1170        Self {
1171            bit_buffer: bit_buffer as u64,
1172            bits_in,
1173            buf: [0; 8],
1174        }
1175    }
1176
1177    #[inline(always)]
1178    pub fn put(&mut self, bits: u32, len: u32) {
1179        self.bit_buffer |= (bits as u64) << self.bits_in;
1180        self.bits_in += len;
1181    }
1182
1183    #[inline(always)]
1184    pub fn flush<S: Sink>(&mut self, sink: &mut S) -> Result<(), Error> {
1185        let mut i = 0;
1186        while self.bits_in >= 8 {
1187            self.buf[i] = self.bit_buffer as u8;
1188            self.bit_buffer >>= 8;
1189            self.bits_in -= 8;
1190            i += 1;
1191        }
1192        sink.write(&self.buf[0..i])
1193    }
1194}
1195
1196struct Dictionary {
1197    pub dict: [u8; DICTIONARY_FULL_SIZE],
1198    pub next: [u16; DICTIONARY_SIZE],
1199    pub hash: [u16; HASH_SIZE],
1200    pub code_buffer_offset: usize,
1201    pub max_probes: [u32; 2],
1202    pub lookahead_size: usize,
1203    pub lookahead_pos: usize,
1204    pub len: usize,
1205}
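// The dictionary is the LZ77 sliding window: `dict` holds DICTIONARY_SIZE bytes of
// history plus a MAX_MATCH_LEN overlap region, into which bytes written near the
// start of the window are mirrored so that `read_u16`/`read_u64` can read across
// the wrap-around point without masking every access. `hash` maps a rolling
// three-byte hash to the most recent position with that hash, `next` chains older
// positions, and `max_probes` bounds how far `find_match` walks each chain.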
1206
1207impl Dictionary {
1208    #[inline(always)]
1209    fn new(flags: u32) -> Self {
1210        Self {
1211            dict: [0; DICTIONARY_FULL_SIZE],
1212            next: [0; DICTIONARY_SIZE],
1213            hash: [0; HASH_SIZE],
1214            code_buffer_offset: 0,
1215            max_probes: Self::probes_from_flags(flags),
1216            lookahead_size: 0,
1217            lookahead_pos: 0,
1218            len: 0,
1219        }
1220    }
1221
1222    fn probes_from_flags(flags: u32) -> [u32; 2] {
1223        [
1224            1 + ((flags & 0xFFF) + 2) / 3,
1225            1 + (((flags & 0xFFF) >> 2) + 2) / 3,
1226        ]
1227    }
1228
1229    fn read_u64(&self, pos: usize) -> u64 {
1230        let bytes: [u8; 8] = self.dict[pos..pos + 8].try_into().unwrap();
1231        u64::from_le_bytes(bytes)
1232    }
1233
1234    fn read_u16(&self, pos: usize) -> u16 {
1235        self.dict[pos] as u16 | ((self.dict[pos + 1] as u16) << 8)
1236    }
1237
1238    fn get(&self, pos: usize) -> u8 {
1239        self.dict[pos.min(self.dict.len() - 1)]
1240    }
1241
1242    fn find_match(
1243        &self,
1244        lookahead_pos: usize,
1245        max_dist: usize,
1246        max_match_len: usize,
1247        mut match_dist: usize,
1248        mut match_len: usize,
1249    ) -> (usize, usize) {
1250        let max_match_len = max_match_len.min(MAX_MATCH_LEN);
1251        match_len = match_len.max(1);
1252        let pos = lookahead_pos & DICTIONARY_SIZE_MASK;
1253        let mut probe_pos = pos;
1254        let mut num_probes_left = self.max_probes[(match_len >= 32) as usize];
1255        if max_match_len <= match_len {
1256            return (match_dist, match_len);
1257        }
1258        let mut c01 = self.read_u16(pos + match_len - 1);
1259        let s01 = self.read_u16(pos);
1260        'outer: loop {
1261            let mut dist;
1262            'found: loop {
1263                num_probes_left -= 1;
1264                if num_probes_left == 0 {
1265                    return (match_dist, match_len);
1266                }
1267                for _ in 0..3 {
1268                    let next_probe_pos = self.next[probe_pos] as usize;
1269                    dist = (lookahead_pos - next_probe_pos) & 0xFFFF;
1270                    if next_probe_pos == 0 || dist > max_dist {
1271                        return (match_dist, match_len);
1272                    }
1273                    probe_pos = next_probe_pos & DICTIONARY_SIZE_MASK;
1274                    if self.read_u16(probe_pos + match_len - 1) == c01 {
1275                        break 'found;
1276                    }
1277                }
1278            }
1279            if dist == 0 {
1280                return (match_dist, match_len);
1281            }
1282            if self.read_u16(probe_pos) != s01 {
1283                continue;
1284            }
1285            let mut p = pos + 2;
1286            let mut q = probe_pos + 2;
1287            for _ in 0..32 {
1288                let p_data: u64 = self.read_u64(p);
1289                let q_data: u64 = self.read_u64(q);
1290                let xor_data = p_data ^ q_data;
1291                if xor_data == 0 {
1292                    p += 8;
1293                    q += 8;
1294                } else {
1295                    let trailing = xor_data.trailing_zeros() as usize;
1296                    let probe_len = p - pos + (trailing >> 3);
1297                    if probe_len > match_len {
1298                        match_dist = dist;
1299                        match_len = max_match_len.min(probe_len);
1300                        if match_len == max_match_len {
1301                            return (match_dist, match_len);
1302                        }
1303                        c01 = self.read_u16(pos + match_len - 1)
1304                    }
1305                    continue 'outer;
1306                }
1307            }
1308            return (dist, max_match_len.min(MAX_MATCH_LEN));
1309        }
1310    }
1311}
1312
1313#[doc(hidden)]
1314pub struct Snapshot {
1315    pos: usize,
1316    bit_buffer: u32,
1317    bits_in: u32,
1318}
1319
1320#[doc(hidden)]
1321pub trait Sink {
1322    fn put_bits(&mut self, bits: u32, len: u32) -> Result<(), Error>;
1323    fn write(&mut self, buf: &[u8]) -> Result<(), Error>;
1324    fn pad(&mut self) -> Result<(), Error>;
1325    fn snapshot(&self) -> Snapshot;
1326    fn restore(&mut self, snapshot: &Snapshot);
1327    fn set_bit_buffer(&mut self, bit_buffer: u32, bits_in: u32);
1328    fn flush(&mut self) -> Result<(), Error> {
1329        Ok(())
1330    }
1331    fn written(&self) -> u64;
1332}
1333
1334struct BufSink<'a> {
1335    buffer: &'a mut [u8],
1336    pos: usize,
1337    bit_buffer: u32,
1338    bits_in: u32,
1339}
1340
1341impl<'a> BufSink<'a> {
1342    pub fn new(buffer: &'a mut [u8]) -> Self {
1343        Self {
1344            buffer,
1345            pos: 0,
1346            bit_buffer: 0,
1347            bits_in: 0,
1348        }
1349    }
1350}
1351
1352impl Sink for BufSink<'_> {
1353    #[inline(always)]
1354    fn put_bits(&mut self, bits: u32, len: u32) -> Result<(), Error> {
1355        self.bit_buffer |= bits << self.bits_in;
1356        self.bits_in += len;
1357        let limit = self.buffer.len();
1358        while self.bits_in >= 8 {
1359            if self.pos == limit {
1360                return Err(Error::Overflow);
1361            }
1362            self.buffer[self.pos] = self.bit_buffer as u8;
1363            self.pos += 1;
1364            self.bit_buffer >>= 8;
1365            self.bits_in -= 8;
1366        }
1367        Ok(())
1368    }
1369
1370    #[inline(always)]
1371    fn write(&mut self, buf: &[u8]) -> Result<(), Error> {
1372        let len = buf.len();
1373        if self.pos + len > self.buffer.len() {
1374            return Err(Error::Overflow);
1375        }
1376        self.buffer[self.pos..self.pos + len].copy_from_slice(buf);
1377        self.pos += len;
1378        Ok(())
1379    }
1380
1381    fn pad(&mut self) -> Result<(), Error> {
1382        if self.bits_in != 0 {
1383            let len = 8 - self.bits_in;
1384            self.put_bits(0, len)
1385        } else {
1386            Ok(())
1387        }
1388    }
1389
1390    fn snapshot(&self) -> Snapshot {
1391        Snapshot {
1392            pos: self.pos,
1393            bit_buffer: self.bit_buffer,
1394            bits_in: self.bits_in,
1395        }
1396    }
1397
1398    fn restore(&mut self, snapshot: &Snapshot) {
1399        self.pos = snapshot.pos;
1400        self.bit_buffer = snapshot.bit_buffer;
1401        self.bits_in = snapshot.bits_in;
1402    }
1403
1404    fn set_bit_buffer(&mut self, bit_buffer: u32, bits_in: u32) {
1405        self.bit_buffer = bit_buffer;
1406        self.bits_in = bits_in;
1407    }
1408
1409    fn written(&self) -> u64 {
1410        self.pos as u64
1411    }
1412}
1413
1414struct VecSink<'a> {
1415    buffer: &'a mut Vec<u8>,
1416    start_pos: usize,
1417    bit_buffer: u32,
1418    bits_in: u32,
1419}
1420
1421impl<'a> VecSink<'a> {
1422    pub fn new(buffer: &'a mut Vec<u8>) -> Self {
1423        let start_pos = buffer.len();
1424        Self {
1425            buffer,
1426            start_pos,
1427            bit_buffer: 0,
1428            bits_in: 0,
1429        }
1430    }
1431}
1432
1433impl Sink for VecSink<'_> {
1434    #[inline(always)]
1435    fn put_bits(&mut self, bits: u32, len: u32) -> Result<(), Error> {
1436        self.bit_buffer |= bits << self.bits_in;
1437        self.bits_in += len;
1438        while self.bits_in >= 8 {
1439            self.buffer.push(self.bit_buffer as u8);
1440            self.bit_buffer >>= 8;
1441            self.bits_in -= 8;
1442        }
1443        Ok(())
1444    }
1445
1446    #[inline(always)]
1447    fn write(&mut self, buf: &[u8]) -> Result<(), Error> {
1448        self.buffer.extend_from_slice(buf);
1449        Ok(())
1450    }
1451
1452    fn pad(&mut self) -> Result<(), Error> {
1453        if self.bits_in != 0 {
1454            let len = 8 - self.bits_in;
1455            self.put_bits(0, len)
1456        } else {
1457            Ok(())
1458        }
1459    }
1460
1461    fn snapshot(&self) -> Snapshot {
1462        Snapshot {
1463            pos: self.buffer.len(),
1464            bit_buffer: self.bit_buffer,
1465            bits_in: self.bits_in,
1466        }
1467    }
1468
1469    fn restore(&mut self, snapshot: &Snapshot) {
1470        self.buffer.truncate(snapshot.pos);
1471        self.bit_buffer = snapshot.bit_buffer;
1472        self.bits_in = snapshot.bits_in;
1473    }
1474
1475    fn set_bit_buffer(&mut self, bit_buffer: u32, bits_in: u32) {
1476        self.bit_buffer = bit_buffer;
1477        self.bits_in = bits_in;
1478    }
1479
1480    fn written(&self) -> u64 {
1481        (self.buffer.len() - self.start_pos) as u64
1482    }
1483}
1484
1485#[cfg(feature = "std")]
1486struct WriterSink<W> {
1487    writer: W,
1488    buffer: [u8; OUT_BUFFER_SIZE],
1489    pos: usize,
1490    bit_buffer: u32,
1491    bits_in: u32,
1492    written: u64,
1493}
1494
1495#[cfg(feature = "std")]
1496impl<W> WriterSink<W> {
1497    fn new(writer: W) -> Self {
1498        Self {
1499            writer,
1500            buffer: [0; OUT_BUFFER_SIZE],
1501            pos: 0,
1502            bit_buffer: 0,
1503            bits_in: 0,
1504            written: 0,
1505        }
1506    }
1507}
1508
1509#[cfg(feature = "std")]
1510impl<W: Write> Sink for WriterSink<W> {
1511    #[inline(always)]
1512    fn put_bits(&mut self, bits: u32, len: u32) -> Result<(), Error> {
1513        self.bit_buffer |= bits << self.bits_in;
1514        self.bits_in += len;
1515        let limit = self.buffer.len();
1516        while self.bits_in >= 8 {
1517            if self.pos == limit {
1518                return Err(Error::Overflow);
1519            }
1520            self.buffer[self.pos] = self.bit_buffer as u8;
1521            self.pos += 1;
1522            self.bit_buffer >>= 8;
1523            self.bits_in -= 8;
1524        }
1525        Ok(())
1526    }
1527
1528    #[inline(always)]
1529    fn write(&mut self, buf: &[u8]) -> Result<(), Error> {
1530        let len = buf.len();
1531        if self.pos + len > self.buffer.len() {
1532            return Err(Error::Overflow);
1533        }
1534        self.buffer[self.pos..self.pos + len].copy_from_slice(buf);
1535        self.pos += len;
1536        Ok(())
1537    }
1538
1539    fn pad(&mut self) -> Result<(), Error> {
1540        if self.bits_in != 0 {
1541            let len = 8 - self.bits_in;
1542            self.put_bits(0, len)
1543        } else {
1544            Ok(())
1545        }
1546    }
1547
1548    fn snapshot(&self) -> Snapshot {
1549        Snapshot {
1550            pos: self.pos,
1551            bit_buffer: self.bit_buffer,
1552            bits_in: self.bits_in,
1553        }
1554    }
1555
1556    fn restore(&mut self, snapshot: &Snapshot) {
1557        self.pos = snapshot.pos;
1558        self.bit_buffer = snapshot.bit_buffer;
1559        self.bits_in = snapshot.bits_in;
1560    }
1561
1562    fn set_bit_buffer(&mut self, bit_buffer: u32, bits_in: u32) {
1563        self.bit_buffer = bit_buffer;
1564        self.bits_in = bits_in;
1565    }
1566
1567    fn flush(&mut self) -> Result<(), Error> {
1568        let res = match self.writer.write_all(&self.buffer[0..self.pos]) {
1569            Ok(_) => Ok(()),
1570            Err(err) => Err(Error::Io(err)),
1571        };
1572        self.written += self.pos as u64;
1573        self.pos = 0;
1574        res
1575    }
1576
1577    fn written(&self) -> u64 {
1578        self.written
1579    }
1580}
1581
1582fn make_flags(zlib: bool, level: CompressionLevel, strategy: CompressionStrategy) -> u32 {
1583    let level = level.to_raw();
1584    let greedy = if level <= 3 { GREEDY_PARSING } else { 0 };
1585    let mut flags = NUM_PROBES[level] | greedy;
1586    if zlib {
1587        flags |= WRITE_ZLIB_HEADER;
1588    }
1589    if level == 0 {
1590        flags |= FORCE_RAW;
1591    } else {
1592        use CompressionStrategy::*;
1593        match strategy {
1594            Filtered => flags |= FILTER_MATCHES,
1595            Huffman => flags &= !MAX_PROBES_MASK as u32,
1596            Static => flags |= FORCE_STATIC,
1597            RLE => flags |= RLE_MATCHES,
1598            _ => {}
1599        }
1600    }
1601    flags
1602}
1603
fn make_zlib_header(flags: u32) -> [u8; 2] {
    const FCHECK_DIVISOR: u32 = 31;
    let num_probes = flags & (MAX_PROBES_MASK as u32);
    let level = if flags & GREEDY_PARSING != 0 {
        if num_probes <= 1 {
            0
        } else {
            1
        }
    } else if num_probes >= NUM_PROBES[9] {
        3
    } else {
        2
    };
    let cmf = 8 | (7 << 4);
    let flg = (level as u8) << 6;
    let rem = ((cmf as u32 * 256) + flg as u32) % FCHECK_DIVISOR;
    let check = (flg & 0b11100000) + (FCHECK_DIVISOR - rem) as u8;
    [cmf, check]
}

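// Buffer sizes, hash parameters and LZ77 dictionary limits. The commented-out
// values appear to be a smaller alternative tuning.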
const LITERAL_LENGTH_TREE_SIZE: usize = 288;
const DISTANCE_TREE_SIZE: usize = 32;
const PRECODE_TREE_SIZE: usize = 19;
// const CODE_BUFFER_SIZE: usize = 24 * 1024;
// const HASH_BITS: usize = 12;
const CODE_BUFFER_SIZE: usize = 64 * 1024;
const HASH_BITS: usize = 15;
const HASH_SHIFT: usize = (HASH_BITS + 2) / 3;
const HASH_SIZE: usize = 1 << HASH_BITS;
#[cfg(feature = "std")]
const OUT_BUFFER_SIZE: usize = (CODE_BUFFER_SIZE * 13) / 10;
const MIN_MATCH_LEN: usize = 3;
const MAX_MATCH_LEN: usize = 258;
const DICTIONARY_SIZE: usize = 32768;
const DICTIONARY_SIZE_MASK: usize = DICTIONARY_SIZE - 1;
const DICTIONARY_FULL_SIZE: usize = DICTIONARY_SIZE + MAX_MATCH_LEN;

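// Mode bits packed into the compressor flag word alongside the probe count.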
const WRITE_ZLIB_HEADER: u32 = 0x0000_1000;
const GREEDY_PARSING: u32 = 0x0000_4000;
const RLE_MATCHES: u32 = 0x0001_0000;
const FILTER_MATCHES: u32 = 0x0002_0000;
const FORCE_STATIC: u32 = 0x0004_0000;
const FORCE_RAW: u32 = 0x0008_0000;

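// The low 12 bits of the flag word hold the dictionary probe count;
// NUM_PROBES maps each compression level (0-10) to its probe count.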
const MAX_PROBES_MASK: i32 = 0xFFF;
const NUM_PROBES: [u32; 11] = [0, 1, 6, 32, 16, 32, 128, 256, 512, 768, 1500];

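// Maps a match length minus MIN_MATCH_LEN (3) to its DEFLATE length symbol
// (257-285).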
const LEN_SYM: [u16; 256] = [
    257, 258, 259, 260, 261, 262, 263, 264, 265, 265, 266, 266, 267, 267, 268, 268, 269, 269, 269,
    269, 270, 270, 270, 270, 271, 271, 271, 271, 272, 272, 272, 272, 273, 273, 273, 273, 273, 273,
    273, 273, 274, 274, 274, 274, 274, 274, 274, 274, 275, 275, 275, 275, 275, 275, 275, 275, 276,
    276, 276, 276, 276, 276, 276, 276, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277, 277,
    277, 277, 277, 277, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278, 278,
    278, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 279, 280, 280,
    280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 280, 281, 281, 281, 281, 281,
    281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281, 281,
    281, 281, 281, 281, 281, 281, 281, 281, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282,
    282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282, 282,
    282, 282, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283,
    283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 283, 284, 284, 284, 284,
    284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284, 284,
    284, 284, 284, 284, 284, 284, 284, 284, 285,
];

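// Number of extra bits for each length, indexed the same way as LEN_SYM.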
const LEN_EXTRA: [u8; 256] = [
    0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0,
];

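// DEFLATE distance symbols (0-17) for match distances 1-512, indexed by
// distance - 1.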
const SMALL_DIST_SYM: [u8; 512] = [
    0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9,
    10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11,
    11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
    14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
    15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
    15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
    15, 15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
    17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
];

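// Number of extra distance bits for the same range, indexed like
// SMALL_DIST_SYM.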
const SMALL_DIST_EXTRA: [u8; 512] = [
    0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
];

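// Distance symbols (18-29) for match distances above 512, indexed by
// (distance - 1) >> 8; the first two entries are unused.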
const LARGE_DIST_SYM: [u8; 128] = [
    0, 0, 18, 19, 20, 20, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24,
    25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
    27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28,
    28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
    29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
    29, 29, 29, 29, 29, 29, 29, 29,
];

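// Number of extra distance bits, indexed like LARGE_DIST_SYM.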
const LARGE_DIST_EXTRA: [u8; 128] = [
    0, 0, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
    11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
    12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
    13, 13, 13, 13, 13, 13,
];

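// Order in which code length code lengths are emitted in a dynamic block
// header (RFC 1951, section 3.2.7).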
const PRECODE_SWIZZLE: [u8; 19] = [
    16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
];

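// BITMASKS[n] masks the low n bits, i.e. (1 << n) - 1.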
const BITMASKS: [u32; 17] = [
    0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF, 0x01FF, 0x03FF, 0x07FF,
    0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF,
];