1/*
2 * This file is derived from various .h and .c files from the zlib-1.0.4
3 * distribution by Jean-loup Gailly and Mark Adler, with some additions
4 * by Paul Mackerras to aid in implementing Deflate compression and
5 * decompression for PPP packets.  See zlib.h for conditions of
6 * distribution and use.
7 *
8 * Changes that have been made include:
9 * - added Z_PACKET_FLUSH (see zlib.h for details)
10 * - added inflateIncomp and deflateOutputPending
11 * - allow strm->next_out to be NULL, meaning discard the output
12 *
13 * $FreeBSD: head/sys/net/zlib.c 50477 1999-08-28 01:08:13Z peter $
14 */
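/* Illustrative sketch (not part of the original source, never compiled):
 * how the additions listed above might be used by a caller -- one
 * deflate() call per packet with Z_PACKET_FLUSH, and strm->next_out set
 * to Z_NULL when the output should be discarded.  The helper name,
 * buffer names and sizes are invented for the example.
 */
#if 0
static int
compress_packet(z_streamp strm, const unsigned char *pkt, unsigned pkt_len,
		unsigned char *out, unsigned out_len)
{
	strm->next_in   = (Bytef *)pkt;
	strm->avail_in  = pkt_len;
	strm->next_out  = out;		/* or Z_NULL to discard the output */
	strm->avail_out = out_len;

	/* Z_PACKET_FLUSH (the PPP addition) flushes the packet without
	 * emitting the empty stored block that a sync flush would add.
	 */
	if (deflate(strm, Z_PACKET_FLUSH) != Z_OK)
		return -1;
	return (int)(out_len - strm->avail_out);	/* compressed size */
}
#endif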
15
16/*
17 *  ==FILEVERSION 971210==
18 *
19 * This marker is used by the Linux installation script to determine
20 * whether an up-to-date version of this file is already installed.
21 */
22
23#define NO_DUMMY_DECL
24#define NO_ZCFUNCS
25#define MY_ZCALLOC
26
27#if defined(__FreeBSD__) && (defined(KERNEL) || defined(_KERNEL))
28#define inflate	inflate_ppp	/* FreeBSD already has an inflate :-( */
29#endif
30
31
32/* +++ zutil.h */
33/* zutil.h -- internal interface and configuration of the compression library
34 * Copyright (C) 1995-1996 Jean-loup Gailly.
35 * For conditions of distribution and use, see copyright notice in zlib.h
36 */
37
38/* WARNING: this file should *not* be used by applications. It is
39   part of the implementation of the compression library and is
40   subject to change. Applications should only use zlib.h.
41 */
42
43/* From: zutil.h,v 1.16 1996/07/24 13:41:13 me Exp $ */
44
45#ifndef _Z_UTIL_H
46#define _Z_UTIL_H
47
48#ifdef KERNEL
49#include <net/zlib.h>
50#else
51#include "zlib.h"
52#endif
53
54#if defined(KERNEL) || defined(_KERNEL)
55/* Assume this is a *BSD or SVR4 kernel */
56#include <sys/types.h>
57#include <sys/time.h>
58#include <sys/systm.h>
59#  define HAVE_MEMCPY
60#  define memcpy(d, s, n)	bcopy((s), (d), (n))
61#  define memset(d, v, n)	bzero((d), (n))
62#  define memcmp		bcmp
63
64#else
65#if defined(__KERNEL__)
66/* Assume this is a Linux kernel */
67#include <linux/string.h>
68#define HAVE_MEMCPY
69
70#else /* not kernel */
71
72#if defined(MSDOS)||defined(VMS)||defined(CRAY)||defined(WIN32)||defined(RISCOS)
73#   include <stddef.h>
74#   include <errno.h>
75#else
76    extern int errno;
77#endif
78#ifdef STDC
79#  include <string.h>
80#  include <stdlib.h>
81#endif
82#endif /* __KERNEL__ */
83#endif /* _KERNEL || KERNEL */
84
85#ifndef local
86#  define local static
87#endif
88/* compile with -Dlocal if your debugger can't find static symbols */
89
90typedef unsigned char  uch;
91typedef uch FAR uchf;
92typedef unsigned short ush;
93typedef ush FAR ushf;
94typedef unsigned long  ulg;
95
96extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */
97/* (size given to avoid silly warnings with Visual C++) */
98
99#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
100
101#define ERR_RETURN(strm,err) \
102  return (strm->msg = (const char*)ERR_MSG(err), (err))
103/* To be used only when the state is known to be valid */
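/* Worked example (illustrative): the error codes in zlib.h run from
 * Z_VERSION_ERROR (-6) up to Z_NEED_DICT (2), so Z_NEED_DICT-(err)
 * maps them onto indices 0..8 of z_errmsg[].  For instance,
 * ERR_MSG(Z_MEM_ERROR) is z_errmsg[2 - (-4)] = z_errmsg[6].
 */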
104
105        /* common constants */
106
107#ifndef DEF_WBITS
108#  define DEF_WBITS MAX_WBITS
109#endif
110/* default windowBits for decompression. MAX_WBITS is for compression only */
111
112#if MAX_MEM_LEVEL >= 8
113#  define DEF_MEM_LEVEL 8
114#else
115#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
116#endif
117/* default memLevel */
118
119#define STORED_BLOCK 0
120#define STATIC_TREES 1
121#define DYN_TREES    2
122/* The three kinds of block type */
123
124#define MIN_MATCH  3
125#define MAX_MATCH  258
126/* The minimum and maximum match lengths */
127
128#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
129
130        /* target dependencies */
131
132#ifdef MSDOS
133#  define OS_CODE  0x00
134#  ifdef __TURBOC__
135#    include <alloc.h>
136#  else /* MSC or DJGPP */
137#    include <malloc.h>
138#  endif
139#endif
140
141#ifdef OS2
142#  define OS_CODE  0x06
143#endif
144
145#ifdef WIN32 /* Windows 95 & Windows NT */
146#  define OS_CODE  0x0b
147#endif
148
149#if defined(VAXC) || defined(VMS)
150#  define OS_CODE  0x02
151#  define FOPEN(name, mode) \
152     fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
153#endif
154
155#ifdef AMIGA
156#  define OS_CODE  0x01
157#endif
158
159#if defined(ATARI) || defined(atarist)
160#  define OS_CODE  0x05
161#endif
162
163#ifdef MACOS
164#  define OS_CODE  0x07
165#endif
166
167#ifdef __50SERIES /* Prime/PRIMOS */
168#  define OS_CODE  0x0F
169#endif
170
171#ifdef TOPS20
172#  define OS_CODE  0x0a
173#endif
174
175#if defined(_BEOS_) || defined(RISCOS)
176#  define fdopen(fd,mode) NULL /* No fdopen() */
177#endif
178
179        /* Common defaults */
180
181#ifndef OS_CODE
182#  define OS_CODE  0x03  /* assume Unix */
183#endif
184
185#ifndef FOPEN
186#  define FOPEN(name, mode) fopen((name), (mode))
187#endif
188
189         /* functions */
190
191#ifdef HAVE_STRERROR
192   extern char *strerror OF((int));
193#  define zstrerror(errnum) strerror(errnum)
194#else
195#  define zstrerror(errnum) ""
196#endif
197
198#if defined(pyr)
199#  define NO_MEMCPY
200#endif
201#if (defined(M_I86SM) || defined(M_I86MM)) && !defined(_MSC_VER)
202 /* Use our own functions for small and medium model with MSC <= 5.0.
203  * You may have to use the same strategy for Borland C (untested).
204  */
205#  define NO_MEMCPY
206#endif
207#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
208#  define HAVE_MEMCPY
209#endif
210#ifdef HAVE_MEMCPY
211#  ifdef SMALL_MEDIUM /* MSDOS small or medium model */
212#    define zmemcpy _fmemcpy
213#    define zmemcmp _fmemcmp
214#    define zmemzero(dest, len) _fmemset(dest, 0, len)
215#  else
216#    define zmemcpy memcpy
217#    define zmemcmp memcmp
218#    define zmemzero(dest, len) memset(dest, 0, len)
219#  endif
220#else
221   extern void zmemcpy  OF((Bytef* dest, Bytef* source, uInt len));
222   extern int  zmemcmp  OF((Bytef* s1,   Bytef* s2, uInt len));
223   extern void zmemzero OF((Bytef* dest, uInt len));
224#endif
225
226/* Diagnostic functions */
227#ifdef DEBUG_ZLIB
228#  include <stdio.h>
229#  ifndef verbose
230#    define verbose 0
231#  endif
232   extern void z_error    OF((char *m));
233#  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
234#  define Trace(x) fprintf x
235#  define Tracev(x) {if (verbose) fprintf x ;}
236#  define Tracevv(x) {if (verbose>1) fprintf x ;}
237#  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
238#  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
239#else
240#  define Assert(cond,msg)
241#  define Trace(x)
242#  define Tracev(x)
243#  define Tracevv(x)
244#  define Tracec(c,x)
245#  define Tracecv(c,x)
246#endif
247
248
249typedef uLong (*check_func) OF((uLong check, const Bytef *buf, uInt len));
250
251voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size));
252void   zcfree  OF((voidpf opaque, voidpf ptr));
253
254#define ZALLOC(strm, items, size) \
255           (*((strm)->zalloc))((strm)->opaque, (items), (size))
256#define ZFREE(strm, addr)  (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
257#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
258
259#endif /* _Z_UTIL_H */
260/* --- zutil.h */
261
262/* +++ deflate.h */
263/* deflate.h -- internal compression state
264 * Copyright (C) 1995-1996 Jean-loup Gailly
265 * For conditions of distribution and use, see copyright notice in zlib.h
266 */
267
268/* WARNING: this file should *not* be used by applications. It is
269   part of the implementation of the compression library and is
270   subject to change. Applications should only use zlib.h.
271 */
272
273/* From: deflate.h,v 1.10 1996/07/02 12:41:00 me Exp $ */
274
275#ifndef _DEFLATE_H
276#define _DEFLATE_H
277
278/* #include "zutil.h" */
279
280/* ===========================================================================
281 * Internal compression state.
282 */
283
284#define LENGTH_CODES 29
285/* number of length codes, not counting the special END_BLOCK code */
286
287#define LITERALS  256
288/* number of literal bytes 0..255 */
289
290#define L_CODES (LITERALS+1+LENGTH_CODES)
291/* number of Literal or Length codes, including the END_BLOCK code */
292
293#define D_CODES   30
294/* number of distance codes */
295
296#define BL_CODES  19
297/* number of codes used to transfer the bit lengths */
298
299#define HEAP_SIZE (2*L_CODES+1)
300/* maximum heap size */
301
302#define MAX_BITS 15
303/* All codes must not exceed MAX_BITS bits */
304
305#define INIT_STATE    42
306#define BUSY_STATE   113
307#define FINISH_STATE 666
308/* Stream status */
309
310
311/* Data structure describing a single value and its code string. */
312typedef struct ct_data_s {
313    union {
314        ush  freq;       /* frequency count */
315        ush  code;       /* bit string */
316    } fc;
317    union {
318        ush  dad;        /* father node in Huffman tree */
319        ush  len;        /* length of bit string */
320    } dl;
321} FAR ct_data;
322
323#define Freq fc.freq
324#define Code fc.code
325#define Dad  dl.dad
326#define Len  dl.len
327
328typedef struct static_tree_desc_s  static_tree_desc;
329
330typedef struct tree_desc_s {
331    ct_data *dyn_tree;           /* the dynamic tree */
332    int     max_code;            /* largest code with non zero frequency */
333    static_tree_desc *stat_desc; /* the corresponding static tree */
334} FAR tree_desc;
335
336typedef ush Pos;
337typedef Pos FAR Posf;
338typedef unsigned IPos;
339
340/* A Pos is an index in the character window. We use short instead of int to
341 * save space in the various tables. IPos is used only for parameter passing.
342 */
343
344typedef struct deflate_state {
345    z_streamp strm;      /* pointer back to this zlib stream */
346    int   status;        /* as the name implies */
347    Bytef *pending_buf;  /* output still pending */
348    ulg   pending_buf_size; /* size of pending_buf */
349    Bytef *pending_out;  /* next pending byte to output to the stream */
350    int   pending;       /* nb of bytes in the pending buffer */
351    int   noheader;      /* suppress zlib header and adler32 */
352    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
353    Byte  method;        /* STORED (for zip only) or DEFLATED */
354    int   last_flush;    /* value of flush param for previous deflate call */
355
356                /* used by deflate.c: */
357
358    uInt  w_size;        /* LZ77 window size (32K by default) */
359    uInt  w_bits;        /* log2(w_size)  (8..16) */
360    uInt  w_mask;        /* w_size - 1 */
361
362    Bytef *window;
363    /* Sliding window. Input bytes are read into the second half of the window,
364     * and move to the first half later to keep a dictionary of at least wSize
365     * bytes. With this organization, matches are limited to a distance of
366     * wSize-MAX_MATCH bytes, but this ensures that IO is always
367     * performed with a length multiple of the block size. Also, it limits
368     * the window size to 64K, which is quite useful on MSDOS.
369     * To do: use the user input buffer as sliding window.
370     */
371
372    ulg window_size;
373    /* Actual size of window: 2*wSize, except when the user input buffer
374     * is directly used as sliding window.
375     */
376
377    Posf *prev;
378    /* Link to older string with same hash index. To limit the size of this
379     * array to 64K, this link is maintained only for the last 32K strings.
380     * An index in this array is thus a window index modulo 32K.
381     */
382
383    Posf *head; /* Heads of the hash chains or NIL. */
384
385    uInt  ins_h;          /* hash index of string to be inserted */
386    uInt  hash_size;      /* number of elements in hash table */
387    uInt  hash_bits;      /* log2(hash_size) */
388    uInt  hash_mask;      /* hash_size-1 */
389
390    uInt  hash_shift;
391    /* Number of bits by which ins_h must be shifted at each input
392     * step. It must be such that after MIN_MATCH steps, the oldest
393     * byte no longer takes part in the hash key, that is:
394     *   hash_shift * MIN_MATCH >= hash_bits
395     */
396
397    long block_start;
398    /* Window position at the beginning of the current output block. Gets
399     * negative when the window is moved backwards.
400     */
401
402    uInt match_length;           /* length of best match */
403    IPos prev_match;             /* previous match */
404    int match_available;         /* set if previous match exists */
405    uInt strstart;               /* start of string to insert */
406    uInt match_start;            /* start of matching string */
407    uInt lookahead;              /* number of valid bytes ahead in window */
408
409    uInt prev_length;
410    /* Length of the best match at previous step. Matches not greater than this
411     * are discarded. This is used in the lazy match evaluation.
412     */
413
414    uInt max_chain_length;
415    /* To speed up deflation, hash chains are never searched beyond this
416     * length.  A higher limit improves compression ratio but degrades the
417     * speed.
418     */
419
420    uInt max_lazy_match;
421    /* Attempt to find a better match only when the current match is strictly
422     * smaller than this value. This mechanism is used only for compression
423     * levels >= 4.
424     */
425#   define max_insert_length  max_lazy_match
426    /* Insert new strings in the hash table only if the match length is not
427     * greater than this length. This saves time but degrades compression.
428     * max_insert_length is used only for compression levels <= 3.
429     */
430
431    int level;    /* compression level (1..9) */
432    int strategy; /* favor or force Huffman coding*/
433
434    uInt good_match;
435    /* Use a faster search when the previous match is longer than this */
436
437    int nice_match; /* Stop searching when current match exceeds this */
438
439                /* used by trees.c: */
440    /* Didn't use ct_data typedef below to suppress compiler warning */
441    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
442    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
443    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
444
445    struct tree_desc_s l_desc;               /* desc. for literal tree */
446    struct tree_desc_s d_desc;               /* desc. for distance tree */
447    struct tree_desc_s bl_desc;              /* desc. for bit length tree */
448
449    ush bl_count[MAX_BITS+1];
450    /* number of codes at each bit length for an optimal tree */
451
452    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
453    int heap_len;               /* number of elements in the heap */
454    int heap_max;               /* element of largest frequency */
455    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
456     * The same heap array is used to build all trees.
457     */
458
459    uch depth[2*L_CODES+1];
460    /* Depth of each subtree used as tie breaker for trees of equal frequency
461     */
462
463    uchf *l_buf;          /* buffer for literals or lengths */
464
465    uInt  lit_bufsize;
466    /* Size of match buffer for literals/lengths.  There are 4 reasons for
467     * limiting lit_bufsize to 64K:
468     *   - frequencies can be kept in 16 bit counters
469     *   - if compression is not successful for the first block, all input
470     *     data is still in the window so we can still emit a stored block even
471     *     when input comes from standard input.  (This can also be done for
472     *     all blocks if lit_bufsize is not greater than 32K.)
473     *   - if compression is not successful for a file smaller than 64K, we can
474     *     even emit a stored file instead of a stored block (saving 5 bytes).
475     *     This is applicable only for zip (not gzip or zlib).
476     *   - creating new Huffman trees less frequently may not provide fast
477     *     adaptation to changes in the input data statistics. (Take for
478     *     example a binary file with poorly compressible code followed by
479     *     a highly compressible string table.) Smaller buffer sizes give
480     *     fast adaptation but have of course the overhead of transmitting
481     *     trees more frequently.
482     *   - I can't count above 4
483     */
484
485    uInt last_lit;      /* running index in l_buf */
486
487    ushf *d_buf;
488    /* Buffer for distances. To simplify the code, d_buf and l_buf have
489     * the same number of elements. To use different lengths, an extra flag
490     * array would be necessary.
491     */
492
493    ulg opt_len;        /* bit length of current block with optimal trees */
494    ulg static_len;     /* bit length of current block with static trees */
495    ulg compressed_len; /* total bit length of compressed file */
496    uInt matches;       /* number of string matches in current block */
497    int last_eob_len;   /* bit length of EOB code for last block */
498
499#ifdef DEBUG_ZLIB
500    ulg bits_sent;      /* bit length of the compressed data */
501#endif
502
503    ush bi_buf;
504    /* Output buffer. bits are inserted starting at the bottom (least
505     * significant bits).
506     */
507    int bi_valid;
508    /* Number of valid bits in bi_buf.  All bits above the last valid bit
509     * are always zero.
510     */
511
512} FAR deflate_state;
513
514/* Output a byte on the stream.
515 * IN assertion: there is enough room in pending_buf.
516 */
517#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
518
519
520#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
521/* Minimum amount of lookahead, except at the end of the input file.
522 * See deflate.c for comments about the MIN_MATCH+1.
523 */
524
525#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
526/* In order to simplify the code, particularly on 16 bit machines, match
527 * distances are limited to MAX_DIST instead of WSIZE.
528 */
529
530        /* in trees.c */
531void _tr_init         OF((deflate_state *s));
532int  _tr_tally        OF((deflate_state *s, unsigned dist, unsigned lc));
533ulg  _tr_flush_block  OF((deflate_state *s, charf *buf, ulg stored_len,
534			  int eof));
535void _tr_align        OF((deflate_state *s));
536void _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
537                          int eof));
538void _tr_stored_type_only OF((deflate_state *));
539
540#endif
541/* --- deflate.h */
542
543/* +++ deflate.c */
544/* deflate.c -- compress data using the deflation algorithm
545 * Copyright (C) 1995-1996 Jean-loup Gailly.
546 * For conditions of distribution and use, see copyright notice in zlib.h
547 */
548
549/*
550 *  ALGORITHM
551 *
552 *      The "deflation" process depends on being able to identify portions
553 *      of the input text which are identical to earlier input (within a
554 *      sliding window trailing behind the input currently being processed).
555 *
556 *      The most straightforward technique turns out to be the fastest for
557 *      most input files: try all possible matches and select the longest.
558 *      The key feature of this algorithm is that insertions into the string
559 *      dictionary are very simple and thus fast, and deletions are avoided
560 *      completely. Insertions are performed at each input character, whereas
561 *      string matches are performed only when the previous match ends. So it
562 *      is preferable to spend more time in matches to allow very fast string
563 *      insertions and avoid deletions. The matching algorithm for small
564 *      strings is inspired from that of Rabin & Karp. A brute force approach
565 *      is used to find longer strings when a small match has been found.
566 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
567 *      (by Leonid Broukhis).
568 *         A previous version of this file used a more sophisticated algorithm
569 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
570 *      time, but has a larger average cost, uses more memory and is patented.
571 *      However the F&G algorithm may be faster for some highly redundant
572 *      files if the parameter max_chain_length (described below) is too large.
573 *
574 *  ACKNOWLEDGEMENTS
575 *
576 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
577 *      I found it in 'freeze' written by Leonid Broukhis.
578 *      Thanks to many people for bug reports and testing.
579 *
580 *  REFERENCES
581 *
582 *      Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
583 *      Available in ftp://ds.internic.net/rfc/rfc1951.txt
584 *
585 *      A description of the Rabin and Karp algorithm is given in the book
586 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
587 *
588 *      Fiala,E.R., and Greene,D.H.
589 *         Data Compression with Finite Windows, Comm. ACM, 32,4 (1989) 490-505
590 *
591 */
592
593/* From: deflate.c,v 1.15 1996/07/24 13:40:58 me Exp $ */
594
595/* #include "deflate.h" */
596
597char deflate_copyright[] = " deflate 1.0.4 Copyright 1995-1996 Jean-loup Gailly ";
598/*
599  If you use the zlib library in a product, an acknowledgment is welcome
600  in the documentation of your product. If for some reason you cannot
601  include such an acknowledgment, I would appreciate that you keep this
602  copyright string in the executable of your product.
603 */
604
605/* ===========================================================================
606 *  Function prototypes.
607 */
608typedef enum {
609    need_more,      /* block not completed, need more input or more output */
610    block_done,     /* block flush performed */
611    finish_started, /* finish started, need only more output at next deflate */
612    finish_done     /* finish done, accept no more input or output */
613} block_state;
614
615typedef block_state (*compress_func) OF((deflate_state *s, int flush));
616/* Compression function. Returns the block state after the call. */
617
618local void fill_window    OF((deflate_state *s));
619local block_state deflate_stored OF((deflate_state *s, int flush));
620local block_state deflate_fast   OF((deflate_state *s, int flush));
621local block_state deflate_slow   OF((deflate_state *s, int flush));
622local void lm_init        OF((deflate_state *s));
623local void putShortMSB    OF((deflate_state *s, uInt b));
624local void flush_pending  OF((z_streamp strm));
625local int read_buf        OF((z_streamp strm, charf *buf, unsigned size));
626#ifdef ASMV
627      void match_init OF((void)); /* asm code initialization */
628      uInt longest_match  OF((deflate_state *s, IPos cur_match));
629#else
630local uInt longest_match  OF((deflate_state *s, IPos cur_match));
631#endif
632
633#ifdef DEBUG_ZLIB
634local  void check_match OF((deflate_state *s, IPos start, IPos match,
635                            int length));
636#endif
637
638/* ===========================================================================
639 * Local data
640 */
641
642#define NIL 0
643/* Tail of hash chains */
644
645#ifndef TOO_FAR
646#  define TOO_FAR 4096
647#endif
648/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
649
650#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
651/* Minimum amount of lookahead, except at the end of the input file.
652 * See deflate.c for comments about the MIN_MATCH+1.
653 */
654
655/* Values for max_lazy_match, good_match and max_chain_length, depending on
656 * the desired pack level (0..9). The values given below have been tuned to
657 * exclude worst case performance for pathological files. Better values may be
658 * found for specific files.
659 */
660typedef struct config_s {
661   ush good_length; /* reduce lazy search above this match length */
662   ush max_lazy;    /* do not perform lazy search above this match length */
663   ush nice_length; /* quit search above this match length */
664   ush max_chain;
665   compress_func func;
666} config;
667
668local config configuration_table[10] = {
669/*      good lazy nice chain */
670/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
671/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
672/* 2 */ {4,    5, 16,    8, deflate_fast},
673/* 3 */ {4,    6, 32,   32, deflate_fast},
674
675/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
676/* 5 */ {8,   16, 32,   32, deflate_slow},
677/* 6 */ {8,   16, 128, 128, deflate_slow},
678/* 7 */ {8,   32, 128, 256, deflate_slow},
679/* 8 */ {32, 128, 258, 1024, deflate_slow},
680/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */
681
682/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
683 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
684 * meaning.
685 */
686
687#define EQUAL 0
688/* result of memcmp for equal strings */
689
690#ifndef NO_DUMMY_DECL
691struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
692#endif
693
694/* ===========================================================================
695 * Update a hash value with the given input byte
696 * IN  assertion: all calls to UPDATE_HASH are made with consecutive
697 *    input characters, so that a running hash key can be computed from the
698 *    previous key instead of complete recalculation each time.
699 */
700#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
701
702
703/* ===========================================================================
704 * Insert string str in the dictionary and set match_head to the previous head
705 * of the hash chain (the most recent string with same hash key). Return
706 * the previous length of the hash chain.
707 * IN  assertion: all calls to INSERT_STRING are made with consecutive
708 *    input characters and the first MIN_MATCH bytes of str are valid
709 *    (except for the last MIN_MATCH-1 bytes of the input file).
710 */
711#define INSERT_STRING(s, str, match_head) \
712   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
713    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
714    s->head[s->ins_h] = (Pos)(str))
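/* Worked example (illustrative): with the default memLevel of 8,
 * deflateInit2_() below sets hash_bits = memLevel + 7 = 15, so
 *   hash_shift = (15 + MIN_MATCH - 1) / MIN_MATCH = 17/3 = 5.
 * After MIN_MATCH (3) calls to UPDATE_HASH a byte has been shifted left
 * by 3*5 = 15 bits, i.e. past hash_mask, so it no longer contributes to
 * the hash key -- exactly the property the hash_shift comment in
 * deflate_state requires.
 */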
715
716/* ===========================================================================
717 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
718 * prev[] will be initialized on the fly.
719 */
720#define CLEAR_HASH(s) \
721    s->head[s->hash_size-1] = NIL; \
722    zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
723
724/* ========================================================================= */
725int deflateInit_(strm, level, version, stream_size)
726    z_streamp strm;
727    int level;
728    const char *version;
729    int stream_size;
730{
731    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
732			 Z_DEFAULT_STRATEGY, version, stream_size);
733    /* To do: ignore strm->next_in if we use it as window */
734}
735
736/* ========================================================================= */
737int deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
738		  version, stream_size)
739    z_streamp strm;
740    int  level;
741    int  method;
742    int  windowBits;
743    int  memLevel;
744    int  strategy;
745    const char *version;
746    int stream_size;
747{
748    deflate_state *s;
749    int noheader = 0;
750    static char* my_version = ZLIB_VERSION;
751
752    ushf *overlay;
753    /* We overlay pending_buf and d_buf+l_buf. This works since the average
754     * output size for (length,distance) codes is <= 24 bits.
755     */
756
757    if (version == Z_NULL || version[0] != my_version[0] ||
758        stream_size != sizeof(z_stream)) {
759	return Z_VERSION_ERROR;
760    }
761    if (strm == Z_NULL) return Z_STREAM_ERROR;
762
763    strm->msg = Z_NULL;
764#ifndef NO_ZCFUNCS
765    if (strm->zalloc == Z_NULL) {
766	strm->zalloc = zcalloc;
767	strm->opaque = (voidpf)0;
768    }
769    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
770#endif
771
772    if (level == Z_DEFAULT_COMPRESSION) level = 6;
773
774    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
775        noheader = 1;
776        windowBits = -windowBits;
777    }
778    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
779        windowBits < 8 || windowBits > 15 || level < 0 || level > 9 ||
780	strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
781        return Z_STREAM_ERROR;
782    }
783    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
784    if (s == Z_NULL) return Z_MEM_ERROR;
785    strm->state = (struct internal_state FAR *)s;
786    s->strm = strm;
787
788    s->noheader = noheader;
789    s->w_bits = windowBits;
790    s->w_size = 1 << s->w_bits;
791    s->w_mask = s->w_size - 1;
792
793    s->hash_bits = memLevel + 7;
794    s->hash_size = 1 << s->hash_bits;
795    s->hash_mask = s->hash_size - 1;
796    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
797
798    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
799    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
800    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));
801
802    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
803
804    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
805    s->pending_buf = (uchf *) overlay;
806    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);
807
808    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
809        s->pending_buf == Z_NULL) {
810        strm->msg = (const char*)ERR_MSG(Z_MEM_ERROR);
811        deflateEnd (strm);
812        return Z_MEM_ERROR;
813    }
814    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
815    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
816
817    s->level = level;
818    s->strategy = strategy;
819    s->method = (Byte)method;
820
821    return deflateReset(strm);
822}
823
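/* Illustrative sketch (not part of the original source, never compiled):
 * how an ordinary zlib application might request the raw-deflate mode
 * that the negative-windowBits test above implements.  In this kernel
 * build NO_ZCFUNCS is defined, so a real caller would also have to
 * supply its own zalloc/zfree callbacks instead of leaving them NULL.
 */
#if 0
static int
setup_raw_deflate(z_streamp strm)
{
	strm->zalloc = Z_NULL;	/* userland zlib fills these in itself */
	strm->zfree  = Z_NULL;
	strm->opaque = Z_NULL;

	/* windowBits = -15: 32K LZ77 window, no zlib header/trailer */
	return deflateInit2(strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
			    -15, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY);
}
#endif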
824/* ========================================================================= */
825int deflateSetDictionary (strm, dictionary, dictLength)
826    z_streamp strm;
827    const Bytef *dictionary;
828    uInt  dictLength;
829{
830    deflate_state *s;
831    uInt length = dictLength;
832    uInt n;
833    IPos hash_head = 0;
834
835    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
836	return Z_STREAM_ERROR;
837
838    s = (deflate_state *) strm->state;
839    if (s->status != INIT_STATE) return Z_STREAM_ERROR;
840
841    strm->adler = adler32(strm->adler, dictionary, dictLength);
842
843    if (length < MIN_MATCH) return Z_OK;
844    if (length > MAX_DIST(s)) {
845	length = MAX_DIST(s);
846#ifndef USE_DICT_HEAD
847	dictionary += dictLength - length; /* use the tail of the dictionary */
848#endif
849    }
850    zmemcpy((charf *)s->window, dictionary, length);
851    s->strstart = length;
852    s->block_start = (long)length;
853
854    /* Insert all strings in the hash table (except for the last two bytes).
855     * s->lookahead stays null, so s->ins_h will be recomputed at the next
856     * call of fill_window.
857     */
858    s->ins_h = s->window[0];
859    UPDATE_HASH(s, s->ins_h, s->window[1]);
860    for (n = 0; n <= length - MIN_MATCH; n++) {
861	INSERT_STRING(s, n, hash_head);
862    }
863    if (hash_head) hash_head = 0;  /* to make compiler happy */
864    return Z_OK;
865}
866
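/* Illustrative sketch (not part of the original source, never compiled):
 * a preset dictionary has to be installed after deflateInit*() and
 * before the first deflate() call (s->status must still be INIT_STATE
 * above), and the decompressor needs the same dictionary when inflate()
 * later reports Z_NEED_DICT.  The dictionary contents here are invented.
 */
#if 0
static int
install_dictionary(z_streamp strm)
{
	static const Bytef dict[] = "the quick brown fox jumps over the lazy dog";

	return deflateSetDictionary(strm, dict, (uInt)(sizeof(dict) - 1));
}
#endif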
867/* ========================================================================= */
868int deflateReset (strm)
869    z_streamp strm;
870{
871    deflate_state *s;
872
873    if (strm == Z_NULL || strm->state == Z_NULL ||
874        strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
875
876    strm->total_in = strm->total_out = 0;
877    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
878    strm->data_type = Z_UNKNOWN;
879
880    s = (deflate_state *)strm->state;
881    s->pending = 0;
882    s->pending_out = s->pending_buf;
883
884    if (s->noheader < 0) {
885        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
886    }
887    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
888    strm->adler = 1;
889    s->last_flush = Z_NO_FLUSH;
890
891    _tr_init(s);
892    lm_init(s);
893
894    return Z_OK;
895}
896
897/* ========================================================================= */
898int deflateParams(strm, level, strategy)
899    z_streamp strm;
900    int level;
901    int strategy;
902{
903    deflate_state *s;
904    compress_func func;
905    int err = Z_OK;
906
907    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
908    s = (deflate_state *) strm->state;
909
910    if (level == Z_DEFAULT_COMPRESSION) {
911	level = 6;
912    }
913    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
914	return Z_STREAM_ERROR;
915    }
916    func = configuration_table[s->level].func;
917
918    if (func != configuration_table[level].func && strm->total_in != 0) {
919	/* Flush the last buffer: */
920	err = deflate(strm, Z_PARTIAL_FLUSH);
921    }
922    if (s->level != level) {
923	s->level = level;
924	s->max_lazy_match   = configuration_table[level].max_lazy;
925	s->good_match       = configuration_table[level].good_length;
926	s->nice_match       = configuration_table[level].nice_length;
927	s->max_chain_length = configuration_table[level].max_chain;
928    }
929    s->strategy = strategy;
930    return err;
931}
932
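/* Illustrative sketch (not part of the original source, never compiled):
 * deflateParams may be called between deflate() calls to change the
 * level and/or strategy on the fly.  Dropping from the default level 6
 * to level 1 switches the compression function from deflate_slow to
 * deflate_fast (see configuration_table), which is why the code above
 * first flushes the current block with Z_PARTIAL_FLUSH once any input
 * has been consumed.
 */
#if 0
static int
switch_to_fast(z_streamp strm)
{
	return deflateParams(strm, 1, Z_DEFAULT_STRATEGY);
}
#endif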
933/* =========================================================================
934 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
935 * IN assertion: the stream state is correct and there is enough room in
936 * pending_buf.
937 */
938local void putShortMSB (s, b)
939    deflate_state *s;
940    uInt b;
941{
942    put_byte(s, (Byte)(b >> 8));
943    put_byte(s, (Byte)(b & 0xff));
944}
945
946/* =========================================================================
947 * Flush as much pending output as possible. All deflate() output goes
948 * through this function so some applications may wish to modify it
949 * to avoid allocating a large strm->next_out buffer and copying into it.
950 * (See also read_buf()).
951 */
952local void flush_pending(strm)
953    z_streamp strm;
954{
955    deflate_state *s = (deflate_state *) strm->state;
956    unsigned len = s->pending;
957
958    if (len > strm->avail_out) len = strm->avail_out;
959    if (len == 0) return;
960
961    if (strm->next_out != Z_NULL) {
962	zmemcpy(strm->next_out, s->pending_out, len);
963	strm->next_out += len;
964    }
965    s->pending_out += len;
966    strm->total_out += len;
967    strm->avail_out  -= len;
968    s->pending -= len;
969    if (s->pending == 0) {
970        s->pending_out = s->pending_buf;
971    }
972}
973
974/* ========================================================================= */
975int deflate (strm, flush)
976    z_streamp strm;
977    int flush;
978{
979    int old_flush; /* value of flush param for previous deflate call */
980    deflate_state *s;
981
982    if (strm == Z_NULL || strm->state == Z_NULL ||
983	flush > Z_FINISH || flush < 0) {
984        return Z_STREAM_ERROR;
985    }
986    s = (deflate_state *) strm->state;
987
988    if ((strm->next_in == Z_NULL && strm->avail_in != 0) ||
989	(s->status == FINISH_STATE && flush != Z_FINISH)) {
990        ERR_RETURN(strm, Z_STREAM_ERROR);
991    }
992    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
993
994    s->strm = strm; /* just in case */
995    old_flush = s->last_flush;
996    s->last_flush = flush;
997
998    /* Write the zlib header */
999    if (s->status == INIT_STATE) {
1000
1001        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
1002        uInt level_flags = (s->level-1) >> 1;
1003
1004        if (level_flags > 3) level_flags = 3;
1005        header |= (level_flags << 6);
1006	if (s->strstart != 0) header |= PRESET_DICT;
1007        header += 31 - (header % 31);
1008
1009        s->status = BUSY_STATE;
1010        putShortMSB(s, header);
1011
1012	/* Save the adler32 of the preset dictionary: */
1013	if (s->strstart != 0) {
1014	    putShortMSB(s, (uInt)(strm->adler >> 16));
1015	    putShortMSB(s, (uInt)(strm->adler & 0xffff));
1016	}
1017	strm->adler = 1L;
1018    }
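    /* Worked example (illustrative) of the header computation above, for
     * the defaults w_bits = 15, level = 6, no preset dictionary:
     *    header      = (Z_DEFLATED + (7 << 4)) << 8  = 0x7800
     *    level_flags = (6 - 1) >> 1                  = 2
     *    header     |= 2 << 6                        -> 0x7880
     *    header     += 31 - (0x7880 % 31)            -> 0x789c
     * giving the familiar 78 9c zlib header, a multiple of 31 as
     * required by RFC 1950.
     */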
1019
1020    /* Flush as much pending output as possible */
1021    if (s->pending != 0) {
1022        flush_pending(strm);
1023        if (strm->avail_out == 0) {
1024	    /* Since avail_out is 0, deflate will be called again with
1025	     * more output space, but possibly with both pending and
1026	     * avail_in equal to zero. There won't be anything to do,
1027	     * but this is not an error situation so make sure we
1028	     * return OK instead of BUF_ERROR at next call of deflate:
1029             */
1030	    s->last_flush = -1;
1031	    return Z_OK;
1032	}
1033
1034    /* Make sure there is something to do and avoid duplicate consecutive
1035     * flushes. For repeated and useless calls with Z_FINISH, we keep
1036     * returning Z_STREAM_END instead of Z_BUF_ERROR.
1037     */
1038    } else if (strm->avail_in == 0 && flush <= old_flush &&
1039	       flush != Z_FINISH) {
1040        ERR_RETURN(strm, Z_BUF_ERROR);
1041    }
1042
1043    /* User must not provide more input after the first FINISH: */
1044    if (s->status == FINISH_STATE && strm->avail_in != 0) {
1045        ERR_RETURN(strm, Z_BUF_ERROR);
1046    }
1047
1048    /* Start a new block or continue the current one.
1049     */
1050    if (strm->avail_in != 0 || s->lookahead != 0 ||
1051        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
1052        block_state bstate;
1053
1054	bstate = (*(configuration_table[s->level].func))(s, flush);
1055
1056        if (bstate == finish_started || bstate == finish_done) {
1057            s->status = FINISH_STATE;
1058        }
1059        if (bstate == need_more || bstate == finish_started) {
1060	    if (strm->avail_out == 0) {
1061	        s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
1062	    }
1063	    return Z_OK;
1064	    /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
1065	     * of deflate should use the same flush parameter to make sure
1066	     * that the flush is complete. So we don't have to output an
1067	     * empty block here, this will be done at next call. This also
1068	     * ensures that for a very small output buffer, we emit at most
1069	     * one empty block.
1070	     */
1071	}
1072        if (bstate == block_done) {
1073            if (flush == Z_PARTIAL_FLUSH) {
1074                _tr_align(s);
1075	    } else if (flush == Z_PACKET_FLUSH) {
1076		/* Output just the 3-bit `stored' block type value,
1077		   but not a zero length. */
1078		_tr_stored_type_only(s);
1079            } else { /* FULL_FLUSH or SYNC_FLUSH */
1080                _tr_stored_block(s, (char*)0, 0L, 0);
1081                /* For a full flush, this empty block will be recognized
1082                 * as a special marker by inflate_sync().
1083                 */
1084                if (flush == Z_FULL_FLUSH) {
1085                    CLEAR_HASH(s);             /* forget history */
1086                }
1087            }
1088            flush_pending(strm);
1089	    if (strm->avail_out == 0) {
1090	      s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
1091	      return Z_OK;
1092	    }
1093        }
1094    }
1095    Assert(strm->avail_out > 0, "bug2");
1096
1097    if (flush != Z_FINISH) return Z_OK;
1098    if (s->noheader) return Z_STREAM_END;
1099
1100    /* Write the zlib trailer (adler32) */
1101    putShortMSB(s, (uInt)(strm->adler >> 16));
1102    putShortMSB(s, (uInt)(strm->adler & 0xffff));
1103    flush_pending(strm);
1104    /* If avail_out is zero, the application will call deflate again
1105     * to flush the rest.
1106     */
1107    s->noheader = -1; /* write the trailer only once! */
1108    return s->pending != 0 ? Z_OK : Z_STREAM_END;
1109}
1110
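/* Illustrative sketch (not part of the original source, never compiled):
 * the usual single-shot loop an application would wrap around deflate()
 * when the whole input and a large enough output buffer are available.
 * Names are placeholders; error handling is minimal.
 */
#if 0
static int
compress_all(z_streamp strm, Bytef *in, uInt in_len, Bytef *out, uInt out_len)
{
	int ret;

	strm->next_in   = in;
	strm->avail_in  = in_len;
	strm->next_out  = out;
	strm->avail_out = out_len;

	/* With Z_FINISH, deflate() returns Z_OK while it still needs more
	 * output space and Z_STREAM_END once the trailer has been written.
	 */
	do {
		ret = deflate(strm, Z_FINISH);
	} while (ret == Z_OK && strm->avail_out != 0);

	return ret == Z_STREAM_END ? Z_OK : Z_BUF_ERROR;
}
#endif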
1111/* ========================================================================= */
1112int deflateEnd (strm)
1113    z_streamp strm;
1114{
1115    int status;
1116    deflate_state *s;
1117
1118    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
1119    s = (deflate_state *) strm->state;
1120
1121    status = s->status;
1122    if (status != INIT_STATE && status != BUSY_STATE &&
1123	status != FINISH_STATE) {
1124      return Z_STREAM_ERROR;
1125    }
1126
1127    /* Deallocate in reverse order of allocations: */
1128    TRY_FREE(strm, s->pending_buf);
1129    TRY_FREE(strm, s->head);
1130    TRY_FREE(strm, s->prev);
1131    TRY_FREE(strm, s->window);
1132
1133    ZFREE(strm, s);
1134    strm->state = Z_NULL;
1135
1136    return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
1137}
1138
1139/* =========================================================================
1140 * Copy the source state to the destination state.
1141 */
1142int deflateCopy (dest, source)
1143    z_streamp dest;
1144    z_streamp source;
1145{
1146    deflate_state *ds;
1147    deflate_state *ss;
1148    ushf *overlay;
1149
1150    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL)
1151        return Z_STREAM_ERROR;
1152    ss = (deflate_state *) source->state;
1153
1154    zmemcpy(dest, source, sizeof(*dest));
1155
1156    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
1157    if (ds == Z_NULL) return Z_MEM_ERROR;
1158    dest->state = (struct internal_state FAR *) ds;
1159    zmemcpy(ds, ss, sizeof(*ds));
1160    ds->strm = dest;
1161
1162    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
1163    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
1164    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
1165    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
1166    ds->pending_buf = (uchf *) overlay;
1167
1168    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
1169        ds->pending_buf == Z_NULL) {
1170        deflateEnd (dest);
1171        return Z_MEM_ERROR;
1172    }
1173    /* ??? following zmemcpy doesn't work for 16-bit MSDOS */
1174    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
1175    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
1176    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
1177    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);
1178
1179    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
1180    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
1181    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;
1182
1183    ds->l_desc.dyn_tree = ds->dyn_ltree;
1184    ds->d_desc.dyn_tree = ds->dyn_dtree;
1185    ds->bl_desc.dyn_tree = ds->bl_tree;
1186
1187    return Z_OK;
1188}
1189
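/* Illustrative sketch (not part of the original source, never compiled):
 * deflateCopy duplicates the entire compression state, which lets a
 * caller try an alternative continuation of a stream and fall back to
 * the snapshot if it turns out worse.  Both streams must eventually be
 * released with deflateEnd().  (Fragment: assumes a z_streamp strm is
 * in scope.)
 */
#if 0
	z_stream snapshot;

	if (deflateCopy(&snapshot, strm) == Z_OK) {
		/* ... experiment with strm, keep snapshot as the backup ... */
		deflateEnd(&snapshot);
	}
#endif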
1190/* ===========================================================================
1191 * Return the number of bytes of output which are immediately available
1192 * for output from the decompressor.
1193 */
1194int deflateOutputPending (strm)
1195    z_streamp strm;
1196{
1197    if (strm == Z_NULL || strm->state == Z_NULL) return 0;
1198
1199    return ((deflate_state *)(strm->state))->pending;
1200}
1201
1202/* ===========================================================================
1203 * Read a new buffer from the current input stream, update the adler32
1204 * and total number of bytes read.  All deflate() input goes through
1205 * this function so some applications may wish to modify it to avoid
1206 * allocating a large strm->next_in buffer and copying from it.
1207 * (See also flush_pending()).
1208 */
1209local int read_buf(strm, buf, size)
1210    z_streamp strm;
1211    charf *buf;
1212    unsigned size;
1213{
1214    unsigned len = strm->avail_in;
1215
1216    if (len > size) len = size;
1217    if (len == 0) return 0;
1218
1219    strm->avail_in  -= len;
1220
1221    if (!((deflate_state *)(strm->state))->noheader) {
1222        strm->adler = adler32(strm->adler, strm->next_in, len);
1223    }
1224    zmemcpy(buf, strm->next_in, len);
1225    strm->next_in  += len;
1226    strm->total_in += len;
1227
1228    return (int)len;
1229}
1230
1231/* ===========================================================================
1232 * Initialize the "longest match" routines for a new zlib stream
1233 */
1234local void lm_init (s)
1235    deflate_state *s;
1236{
1237    s->window_size = (ulg)2L*s->w_size;
1238
1239    CLEAR_HASH(s);
1240
1241    /* Set the default configuration parameters:
1242     */
1243    s->max_lazy_match   = configuration_table[s->level].max_lazy;
1244    s->good_match       = configuration_table[s->level].good_length;
1245    s->nice_match       = configuration_table[s->level].nice_length;
1246    s->max_chain_length = configuration_table[s->level].max_chain;
1247
1248    s->strstart = 0;
1249    s->block_start = 0L;
1250    s->lookahead = 0;
1251    s->match_length = s->prev_length = MIN_MATCH-1;
1252    s->match_available = 0;
1253    s->ins_h = 0;
1254#ifdef ASMV
1255    match_init(); /* initialize the asm code */
1256#endif
1257}
1258
1259/* ===========================================================================
1260 * Set match_start to the longest match starting at the given string and
1261 * return its length. Matches shorter or equal to prev_length are discarded,
1262 * in which case the result is equal to prev_length and match_start is
1263 * garbage.
1264 * IN assertions: cur_match is the head of the hash chain for the current
1265 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
1266 * OUT assertion: the match length is not greater than s->lookahead.
1267 */
1268#ifndef ASMV
1269/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
1270 * match.S. The code will be functionally equivalent.
1271 */
1272local uInt longest_match(s, cur_match)
1273    deflate_state *s;
1274    IPos cur_match;                             /* current match */
1275{
1276    unsigned chain_length = s->max_chain_length;/* max hash chain length */
1277    register Bytef *scan = s->window + s->strstart; /* current string */
1278    register Bytef *match;                       /* matched string */
1279    register int len;                           /* length of current match */
1280    int best_len = s->prev_length;              /* best match length so far */
1281    int nice_match = s->nice_match;             /* stop if match long enough */
1282    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
1283        s->strstart - (IPos)MAX_DIST(s) : NIL;
1284    /* Stop when cur_match becomes <= limit. To simplify the code,
1285     * we prevent matches with the string of window index 0.
1286     */
1287    Posf *prev = s->prev;
1288    uInt wmask = s->w_mask;
1289
1290#ifdef UNALIGNED_OK
1291    /* Compare two bytes at a time. Note: this is not always beneficial.
1292     * Try with and without -DUNALIGNED_OK to check.
1293     */
1294    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
1295    register ush scan_start = *(ushf*)scan;
1296    register ush scan_end   = *(ushf*)(scan+best_len-1);
1297#else
1298    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
1299    register Byte scan_end1  = scan[best_len-1];
1300    register Byte scan_end   = scan[best_len];
1301#endif
1302
1303    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
1304     * It is easy to get rid of this optimization if necessary.
1305     */
1306    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
1307
1308    /* Do not waste too much time if we already have a good match: */
1309    if (s->prev_length >= s->good_match) {
1310        chain_length >>= 2;
1311    }
1312    /* Do not look for matches beyond the end of the input. This is necessary
1313     * to make deflate deterministic.
1314     */
1315    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;
1316
1317    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
1318
1319    do {
1320        Assert(cur_match < s->strstart, "no future");
1321        match = s->window + cur_match;
1322
1323        /* Skip to next match if the match length cannot increase
1324         * or if the match length is less than 2:
1325         */
1326#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
1327        /* This code assumes sizeof(unsigned short) == 2. Do not use
1328         * UNALIGNED_OK if your compiler uses a different size.
1329         */
1330        if (*(ushf*)(match+best_len-1) != scan_end ||
1331            *(ushf*)match != scan_start) continue;
1332
1333        /* It is not necessary to compare scan[2] and match[2] since they are
1334         * always equal when the other bytes match, given that the hash keys
1335         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
1336         * strstart+3, +5, ... up to strstart+257. We check for insufficient
1337         * lookahead only every 4th comparison; the 128th check will be made
1338         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
1339         * necessary to put more guard bytes at the end of the window, or
1340         * to check more often for insufficient lookahead.
1341         */
1342        Assert(scan[2] == match[2], "scan[2]?");
1343        scan++, match++;
1344        do {
1345        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1346                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1347                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1348                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1349                 scan < strend);
1350        /* The funny "do {}" generates better code on most compilers */
1351
1352        /* Here, scan <= window+strstart+257 */
1353        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1354        if (*scan == *match) scan++;
1355
1356        len = (MAX_MATCH - 1) - (int)(strend-scan);
1357        scan = strend - (MAX_MATCH-1);
1358
1359#else /* UNALIGNED_OK */
1360
1361        if (match[best_len]   != scan_end  ||
1362            match[best_len-1] != scan_end1 ||
1363            *match            != *scan     ||
1364            *++match          != scan[1])      continue;
1365
1366        /* The check at best_len-1 can be removed because it will be made
1367         * again later. (This heuristic is not always a win.)
1368         * It is not necessary to compare scan[2] and match[2] since they
1369         * are always equal when the other bytes match, given that
1370         * the hash keys are equal and that HASH_BITS >= 8.
1371         */
1372        scan += 2, match++;
1373        Assert(*scan == *match, "match[2]?");
1374
1375        /* We check for insufficient lookahead only every 8th comparison;
1376         * the 256th check will be made at strstart+258.
1377         */
1378        do {
1379        } while (*++scan == *++match && *++scan == *++match &&
1380                 *++scan == *++match && *++scan == *++match &&
1381                 *++scan == *++match && *++scan == *++match &&
1382                 *++scan == *++match && *++scan == *++match &&
1383                 scan < strend);
1384
1385        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1386
1387        len = MAX_MATCH - (int)(strend - scan);
1388        scan = strend - MAX_MATCH;
1389
1390#endif /* UNALIGNED_OK */
1391
1392        if (len > best_len) {
1393            s->match_start = cur_match;
1394            best_len = len;
1395            if (len >= nice_match) break;
1396#ifdef UNALIGNED_OK
1397            scan_end = *(ushf*)(scan+best_len-1);
1398#else
1399            scan_end1  = scan[best_len-1];
1400            scan_end   = scan[best_len];
1401#endif
1402        }
1403    } while ((cur_match = prev[cur_match & wmask]) > limit
1404             && --chain_length != 0);
1405
1406    if ((uInt)best_len <= s->lookahead) return best_len;
1407    return s->lookahead;
1408}
1409#endif /* ASMV */
1410
1411#ifdef DEBUG_ZLIB
1412/* ===========================================================================
1413 * Check that the match at match_start is indeed a match.
1414 */
1415local void check_match(s, start, match, length)
1416    deflate_state *s;
1417    IPos start, match;
1418    int length;
1419{
1420    /* check that the match is indeed a match */
1421    if (zmemcmp((charf *)s->window + match,
1422                (charf *)s->window + start, length) != EQUAL) {
1423        fprintf(stderr, " start %u, match %u, length %d\n",
1424		start, match, length);
1425        do {
1426	    fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
1427	} while (--length != 0);
1428        z_error("invalid match");
1429    }
1430    if (z_verbose > 1) {
1431        fprintf(stderr,"\\[%d,%d]", start-match, length);
1432        do { putc(s->window[start++], stderr); } while (--length != 0);
1433    }
1434}
1435#else
1436#  define check_match(s, start, match, length)
1437#endif
1438
1439/* ===========================================================================
1440 * Fill the window when the lookahead becomes insufficient.
1441 * Updates strstart and lookahead.
1442 *
1443 * IN assertion: lookahead < MIN_LOOKAHEAD
1444 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
1445 *    At least one byte has been read, or avail_in == 0; reads are
1446 *    performed for at least two bytes (required for the zip translate_eol
1447 *    option -- not supported here).
1448 */
1449local void fill_window(s)
1450    deflate_state *s;
1451{
1452    register unsigned n, m;
1453    register Posf *p;
1454    unsigned more;    /* Amount of free space at the end of the window. */
1455    uInt wsize = s->w_size;
1456
1457    do {
1458        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
1459
1460        /* Deal with !@#$% 64K limit: */
1461        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
1462            more = wsize;
1463
1464        } else if (more == (unsigned)(-1)) {
1465            /* Very unlikely, but possible on 16 bit machine if strstart == 0
1466             * and lookahead == 1 (input done one byte at time)
1467             */
1468            more--;
1469
1470        /* If the window is almost full and there is insufficient lookahead,
1471         * move the upper half to the lower one to make room in the upper half.
1472         */
1473        } else if (s->strstart >= wsize+MAX_DIST(s)) {
1474
1475            zmemcpy((charf *)s->window, (charf *)s->window+wsize,
1476                   (unsigned)wsize);
1477            s->match_start -= wsize;
1478            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
1479            s->block_start -= (long) wsize;
1480
1481            /* Slide the hash table (could be avoided with 32 bit values
1482               at the expense of memory usage). We slide even when level == 0
1483               to keep the hash table consistent if we switch back to level > 0
1484               later. (Using level 0 permanently is not an optimal usage of
1485               zlib, so we don't care about this pathological case.)
1486             */
1487            n = s->hash_size;
1488            p = &s->head[n];
1489            do {
1490                m = *--p;
1491                *p = (Pos)(m >= wsize ? m-wsize : NIL);
1492            } while (--n);
1493
1494            n = wsize;
1495            p = &s->prev[n];
1496            do {
1497                m = *--p;
1498                *p = (Pos)(m >= wsize ? m-wsize : NIL);
1499                /* If n is not on any hash chain, prev[n] is garbage but
1500                 * its value will never be used.
1501                 */
1502            } while (--n);
1503            more += wsize;
1504        }
1505        if (s->strm->avail_in == 0) return;
1506
1507        /* If there was no sliding:
1508         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
1509         *    more == window_size - lookahead - strstart
1510         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
1511         * => more >= window_size - 2*WSIZE + 2
1512         * In the BIG_MEM or MMAP case (not yet supported),
1513         *   window_size == input_size + MIN_LOOKAHEAD  &&
1514         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
1515         * Otherwise, window_size == 2*WSIZE so more >= 2.
1516         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
1517         */
1518        Assert(more >= 2, "more < 2");
1519
1520        n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
1521                     more);
1522        s->lookahead += n;
1523
1524        /* Initialize the hash value now that we have some input: */
1525        if (s->lookahead >= MIN_MATCH) {
1526            s->ins_h = s->window[s->strstart];
1527            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1528#if MIN_MATCH != 3
1529            Call UPDATE_HASH() MIN_MATCH-3 more times
1530#endif
1531        }
1532        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
1533         * but this is not important since only literal bytes will be emitted.
1534         */
1535
1536    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
1537}
1538
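/*
 * Illustrative sketch (not part of zlib): how the hash-table slide above
 * rebases chain heads once the window has moved down by wsize.  The names
 * toy_slide_hash, toy_head, TOY_WSIZE and TOY_NIL are invented for this
 * example only.
 */
#if 0
#define TOY_WSIZE 32768u
#define TOY_NIL   0

static void toy_slide_hash(unsigned short *toy_head, unsigned n)
{
    /* Each entry is a position in the old window: positions below TOY_WSIZE
     * have slid out of reach and become TOY_NIL, the rest are shifted down.
     */
    while (n-- != 0) {
        unsigned m = toy_head[n];
        toy_head[n] = (unsigned short)(m >= TOY_WSIZE ? m - TOY_WSIZE : TOY_NIL);
    }
}
#endif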
1539/* ===========================================================================
1540 * Flush the current block, with given end-of-file flag.
1541 * IN assertion: strstart is set to the end of the current match.
1542 */
1543#define FLUSH_BLOCK_ONLY(s, eof) { \
1544   _tr_flush_block(s, (s->block_start >= 0L ? \
1545                   (charf *)&s->window[(unsigned)s->block_start] : \
1546                   (charf *)Z_NULL), \
1547		(ulg)((long)s->strstart - s->block_start), \
1548		(eof)); \
1549   s->block_start = s->strstart; \
1550   flush_pending(s->strm); \
1551   Tracev((stderr,"[FLUSH]")); \
1552}
1553
1554/* Same but force premature exit if necessary. */
1555#define FLUSH_BLOCK(s, eof) { \
1556   FLUSH_BLOCK_ONLY(s, eof); \
1557   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
1558}
1559
1560/* ===========================================================================
1561 * Copy without compression as much as possible from the input stream, return
1562 * the current block state.
1563 * This function does not insert new strings in the dictionary since
1564 * incompressible data is probably not useful. This function is used
1565 * only for the level=0 compression option.
1566 * NOTE: this function should be optimized to avoid extra copying from
1567 * window to pending_buf.
1568 */
1569local block_state deflate_stored(s, flush)
1570    deflate_state *s;
1571    int flush;
1572{
1573    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
1574     * to pending_buf_size, and each stored block has a 5 byte header:
1575     */
1576    ulg max_block_size = 0xffff;
1577    ulg max_start;
1578
1579    if (max_block_size > s->pending_buf_size - 5) {
1580        max_block_size = s->pending_buf_size - 5;
1581    }
1582
1583    /* Copy as much as possible from input to output: */
1584    for (;;) {
1585        /* Fill the window as much as possible: */
1586        if (s->lookahead <= 1) {
1587
1588            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
1589		   s->block_start >= (long)s->w_size, "slide too late");
1590
1591            fill_window(s);
1592            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;
1593
1594            if (s->lookahead == 0) break; /* flush the current block */
1595        }
1596	Assert(s->block_start >= 0L, "block gone");
1597
1598	s->strstart += s->lookahead;
1599	s->lookahead = 0;
1600
1601	/* Emit a stored block if pending_buf will be full: */
1602 	max_start = s->block_start + max_block_size;
1603        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
1604	    /* strstart == 0 is possible when wrapping around on a 16-bit machine */
1605	    s->lookahead = (uInt)(s->strstart - max_start);
1606	    s->strstart = (uInt)max_start;
1607            FLUSH_BLOCK(s, 0);
1608	}
1609	/* Flush if we may have to slide, otherwise block_start may become
1610         * negative and the data will be gone:
1611         */
1612        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
1613            FLUSH_BLOCK(s, 0);
1614	}
1615    }
1616    FLUSH_BLOCK(s, flush == Z_FINISH);
1617    return flush == Z_FINISH ? finish_done : block_done;
1618}
1619
1620/* ===========================================================================
1621 * Compress as much as possible from the input stream, return the current
1622 * block state.
1623 * This function does not perform lazy evaluation of matches and inserts
1624 * new strings in the dictionary only for unmatched strings or for short
1625 * matches. It is used only for the fast compression options.
1626 */
1627local block_state deflate_fast(s, flush)
1628    deflate_state *s;
1629    int flush;
1630{
1631    IPos hash_head = NIL; /* head of the hash chain */
1632    int bflush;           /* set if current block must be flushed */
1633
1634    for (;;) {
1635        /* Make sure that we always have enough lookahead, except
1636         * at the end of the input file. We need MAX_MATCH bytes
1637         * for the next match, plus MIN_MATCH bytes to insert the
1638         * string following the next match.
1639         */
1640        if (s->lookahead < MIN_LOOKAHEAD) {
1641            fill_window(s);
1642            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
1643	        return need_more;
1644	    }
1645            if (s->lookahead == 0) break; /* flush the current block */
1646        }
1647
1648        /* Insert the string window[strstart .. strstart+2] in the
1649         * dictionary, and set hash_head to the head of the hash chain:
1650         */
1651        if (s->lookahead >= MIN_MATCH) {
1652            INSERT_STRING(s, s->strstart, hash_head);
1653        }
1654
1655        /* Find the longest match, discarding those <= prev_length.
1656         * At this point we always have match_length < MIN_MATCH
1657         */
1658        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
1659            /* To simplify the code, we prevent matches with the string
1660             * of window index 0 (in particular we have to avoid a match
1661             * of the string with itself at the start of the input file).
1662             */
1663            if (s->strategy != Z_HUFFMAN_ONLY) {
1664                s->match_length = longest_match (s, hash_head);
1665            }
1666            /* longest_match() sets match_start */
1667        }
1668        if (s->match_length >= MIN_MATCH) {
1669            check_match(s, s->strstart, s->match_start, s->match_length);
1670
1671            bflush = _tr_tally(s, s->strstart - s->match_start,
1672                               s->match_length - MIN_MATCH);
1673
1674            s->lookahead -= s->match_length;
1675
1676            /* Insert new strings in the hash table only if the match length
1677             * is not too large. This saves time but degrades compression.
1678             */
1679            if (s->match_length <= s->max_insert_length &&
1680                s->lookahead >= MIN_MATCH) {
1681                s->match_length--; /* string at strstart already in hash table */
1682                do {
1683                    s->strstart++;
1684                    INSERT_STRING(s, s->strstart, hash_head);
1685                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
1686                     * always MIN_MATCH bytes ahead.
1687                     */
1688                } while (--s->match_length != 0);
1689                s->strstart++;
1690            } else {
1691                s->strstart += s->match_length;
1692                s->match_length = 0;
1693                s->ins_h = s->window[s->strstart];
1694                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1695#if MIN_MATCH != 3
1696                Call UPDATE_HASH() MIN_MATCH-3 more times
1697#endif
1698                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
1699                 * matter since it will be recomputed at next deflate call.
1700                 */
1701            }
1702        } else {
1703            /* No match, output a literal byte */
1704            Tracevv((stderr,"%c", s->window[s->strstart]));
1705            bflush = _tr_tally (s, 0, s->window[s->strstart]);
1706            s->lookahead--;
1707            s->strstart++;
1708        }
1709        if (bflush) FLUSH_BLOCK(s, 0);
1710    }
1711    FLUSH_BLOCK(s, flush == Z_FINISH);
1712    return flush == Z_FINISH ? finish_done : block_done;
1713}
1714
1715/* ===========================================================================
1716 * Same as above, but achieves better compression. We use a lazy
1717 * evaluation for matches: a match is finally adopted only if there is
1718 * no better match at the next window position.
1719 */
1720local block_state deflate_slow(s, flush)
1721    deflate_state *s;
1722    int flush;
1723{
1724    IPos hash_head = NIL;    /* head of hash chain */
1725    int bflush;              /* set if current block must be flushed */
1726
1727    /* Process the input block. */
1728    for (;;) {
1729        /* Make sure that we always have enough lookahead, except
1730         * at the end of the input file. We need MAX_MATCH bytes
1731         * for the next match, plus MIN_MATCH bytes to insert the
1732         * string following the next match.
1733         */
1734        if (s->lookahead < MIN_LOOKAHEAD) {
1735            fill_window(s);
1736            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
1737	        return need_more;
1738	    }
1739            if (s->lookahead == 0) break; /* flush the current block */
1740        }
1741
1742        /* Insert the string window[strstart .. strstart+2] in the
1743         * dictionary, and set hash_head to the head of the hash chain:
1744         */
1745        if (s->lookahead >= MIN_MATCH) {
1746            INSERT_STRING(s, s->strstart, hash_head);
1747        }
1748
1749        /* Find the longest match, discarding those <= prev_length.
1750         */
1751        s->prev_length = s->match_length, s->prev_match = s->match_start;
1752        s->match_length = MIN_MATCH-1;
1753
1754        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
1755            s->strstart - hash_head <= MAX_DIST(s)) {
1756            /* To simplify the code, we prevent matches with the string
1757             * of window index 0 (in particular we have to avoid a match
1758             * of the string with itself at the start of the input file).
1759             */
1760            if (s->strategy != Z_HUFFMAN_ONLY) {
1761                s->match_length = longest_match (s, hash_head);
1762            }
1763            /* longest_match() sets match_start */
1764
1765            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
1766                 (s->match_length == MIN_MATCH &&
1767                  s->strstart - s->match_start > TOO_FAR))) {
1768
1769                /* If prev_match is also MIN_MATCH, match_start is garbage
1770                 * but we will ignore the current match anyway.
1771                 */
1772                s->match_length = MIN_MATCH-1;
1773            }
1774        }
1775        /* If there was a match at the previous step and the current
1776         * match is not better, output the previous match:
1777         */
1778        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
1779            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
1780            /* Do not insert strings in hash table beyond this. */
1781
1782            check_match(s, s->strstart-1, s->prev_match, s->prev_length);
1783
1784            bflush = _tr_tally(s, s->strstart -1 - s->prev_match,
1785                               s->prev_length - MIN_MATCH);
1786
1787            /* Insert in hash table all strings up to the end of the match.
1788             * strstart-1 and strstart are already inserted. If there is not
1789             * enough lookahead, the last two strings are not inserted in
1790             * the hash table.
1791             */
1792            s->lookahead -= s->prev_length-1;
1793            s->prev_length -= 2;
1794            do {
1795                if (++s->strstart <= max_insert) {
1796                    INSERT_STRING(s, s->strstart, hash_head);
1797                }
1798            } while (--s->prev_length != 0);
1799            s->match_available = 0;
1800            s->match_length = MIN_MATCH-1;
1801            s->strstart++;
1802
1803            if (bflush) FLUSH_BLOCK(s, 0);
1804
1805        } else if (s->match_available) {
1806            /* If there was no match at the previous position, output a
1807             * single literal. If there was a match but the current match
1808             * is longer, truncate the previous match to a single literal.
1809             */
1810            Tracevv((stderr,"%c", s->window[s->strstart-1]));
1811            if (_tr_tally (s, 0, s->window[s->strstart-1])) {
1812                FLUSH_BLOCK_ONLY(s, 0);
1813            }
1814            s->strstart++;
1815            s->lookahead--;
1816            if (s->strm->avail_out == 0) return need_more;
1817        } else {
1818            /* There is no previous match to compare with, wait for
1819             * the next step to decide.
1820             */
1821            s->match_available = 1;
1822            s->strstart++;
1823            s->lookahead--;
1824        }
1825    }
1826    Assert (flush != Z_NO_FLUSH, "no flush?");
1827    if (s->match_available) {
1828        Tracevv((stderr,"%c", s->window[s->strstart-1]));
1829        _tr_tally (s, 0, s->window[s->strstart-1]);
1830        s->match_available = 0;
1831    }
1832    FLUSH_BLOCK(s, flush == Z_FINISH);
1833    return flush == Z_FINISH ? finish_done : block_done;
1834}
1835/* --- deflate.c */
1836
1837/* +++ trees.c */
1838/* trees.c -- output deflated data using Huffman coding
1839 * Copyright (C) 1995-1996 Jean-loup Gailly
1840 * For conditions of distribution and use, see copyright notice in zlib.h
1841 */
1842
1843/*
1844 *  ALGORITHM
1845 *
1846 *      The "deflation" process uses several Huffman trees. The more
1847 *      common source values are represented by shorter bit sequences.
1848 *
1849 *      Each code tree is stored in a compressed form which is itself
1850 * a Huffman encoding of the lengths of all the code strings (in
1851 * ascending order by source values).  The actual code strings are
1852 * reconstructed from the lengths in the inflate process, as described
1853 * in the deflate specification.
1854 *
1855 *  REFERENCES
1856 *
1857 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
1858 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
1859 *
1860 *      Storer, James A.
1861 *          Data Compression:  Methods and Theory, pp. 49-50.
1862 *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
1863 *
1864 *      Sedgewick, R.
1865 *          Algorithms, p290.
1866 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
1867 */
1868
1869/* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */
1870
1871/* #include "deflate.h" */
1872
1873#ifdef DEBUG_ZLIB
1874#  include <ctype.h>
1875#endif
1876
1877/* ===========================================================================
1878 * Constants
1879 */
1880
1881#define MAX_BL_BITS 7
1882/* Bit length codes must not exceed MAX_BL_BITS bits */
1883
1884#define END_BLOCK 256
1885/* end of block literal code */
1886
1887#define REP_3_6      16
1888/* repeat previous bit length 3-6 times (2 bits of repeat count) */
1889
1890#define REPZ_3_10    17
1891/* repeat a zero length 3-10 times  (3 bits of repeat count) */
1892
1893#define REPZ_11_138  18
1894/* repeat a zero length 11-138 times  (7 bits of repeat count) */
1895
1896local int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
1897   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
1898
1899local int extra_dbits[D_CODES] /* extra bits for each distance code */
1900   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
1901
1902local int extra_blbits[BL_CODES]/* extra bits for each bit length code */
1903   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
1904
1905local uch bl_order[BL_CODES]
1906   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
1907/* The lengths of the bit length codes are sent in order of decreasing
1908 * probability, to avoid transmitting the lengths for unused bit length codes.
1909 */
1910
1911#define Buf_size (8 * 2*sizeof(char))
1912/* Number of bits used within bi_buf. (bi_buf might be implemented on
1913 * more than 16 bits on some systems.)
1914 */
1915
1916/* ===========================================================================
1917 * Local data. These are initialized only once.
1918 */
1919
1920local ct_data static_ltree[L_CODES+2];
1921/* The static literal tree. Since the bit lengths are imposed, there is no
1922 * need for the L_CODES extra codes used during heap construction. However,
1923 * the codes 286 and 287 are needed to build a canonical tree (see _tr_init
1924 * below).
1925 */
1926
1927local ct_data static_dtree[D_CODES];
1928/* The static distance tree. (Actually a trivial tree since all codes use
1929 * 5 bits.)
1930 */
1931
1932local uch dist_code[512];
1933/* distance codes. The first 256 values correspond to the distances
1934 * 3 .. 258, the last 256 values correspond to the top 8 bits of
1935 * the 15 bit distances.
1936 */
1937
1938local uch length_code[MAX_MATCH-MIN_MATCH+1];
1939/* length code for each normalized match length (0 == MIN_MATCH) */
1940
1941local int base_length[LENGTH_CODES];
1942/* First normalized length for each code (0 = MIN_MATCH) */
1943
1944local int base_dist[D_CODES];
1945/* First normalized distance for each code (0 = distance of 1) */
1946
1947struct static_tree_desc_s {
1948    ct_data *static_tree;        /* static tree or NULL */
1949    intf    *extra_bits;         /* extra bits for each code or NULL */
1950    int     extra_base;          /* base index for extra_bits */
1951    int     elems;               /* max number of elements in the tree */
1952    int     max_length;          /* max bit length for the codes */
1953};
1954
1955local static_tree_desc  static_l_desc =
1956{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
1957
1958local static_tree_desc  static_d_desc =
1959{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};
1960
1961local static_tree_desc  static_bl_desc =
1962{(ct_data *)0, extra_blbits, 0,      BL_CODES, MAX_BL_BITS};
1963
1964/* ===========================================================================
1965 * Local (static) routines in this file.
1966 */
1967
1968local void tr_static_init OF((void));
1969local void init_block     OF((deflate_state *s));
1970local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
1971local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
1972local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
1973local void build_tree     OF((deflate_state *s, tree_desc *desc));
1974local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
1975local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
1976local int  build_bl_tree  OF((deflate_state *s));
1977local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
1978                              int blcodes));
1979local void compress_block OF((deflate_state *s, ct_data *ltree,
1980                              ct_data *dtree));
1981local void set_data_type  OF((deflate_state *s));
1982local unsigned bi_reverse OF((unsigned value, int length));
1983local void bi_windup      OF((deflate_state *s));
1984local void bi_flush       OF((deflate_state *s));
1985local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
1986                              int header));
1987
1988#ifndef DEBUG_ZLIB
1989#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
1990   /* Send a code of the given tree. c and tree must not have side effects */
1991
1992#else /* DEBUG_ZLIB */
1993#  define send_code(s, c, tree) \
1994     { if (verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
1995       send_bits(s, tree[c].Code, tree[c].Len); }
1996#endif
1997
1998#define d_code(dist) \
1999   ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
2000/* Mapping from a distance to a distance code. dist is the distance - 1 and
2001 * must not have side effects. dist_code[256] and dist_code[257] are never
2002 * used.
2003 */
2004
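/*
 * Illustrative worked example (not part of zlib): mapping a match distance
 * of 300 with d_code().  dist = 300 - 1 = 299, so the second half of the
 * table applies: dist_code[256 + (299 >> 7)] = dist_code[258] = 16, the
 * DEFLATE distance code covering distances 257..384 (7 extra bits).
 * The function name below is invented for this example.
 */
#if 0
static void toy_d_code_example(void)
{
    unsigned dist = 300 - 1;        /* d_code() expects the distance minus one */
    unsigned code = d_code(dist);   /* == 16 */
    (void)code;
}
#endif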
2005/* ===========================================================================
2006 * Output a short LSB first on the stream.
2007 * IN assertion: there is enough room in pendingBuf.
2008 */
2009#define put_short(s, w) { \
2010    put_byte(s, (uch)((w) & 0xff)); \
2011    put_byte(s, (uch)((ush)(w) >> 8)); \
2012}
2013
2014/* ===========================================================================
2015 * Send a value on a given number of bits.
2016 * IN assertion: length <= 16 and value fits in length bits.
2017 */
2018#ifdef DEBUG_ZLIB
2019local void send_bits      OF((deflate_state *s, int value, int length));
2020
2021local void send_bits(s, value, length)
2022    deflate_state *s;
2023    int value;  /* value to send */
2024    int length; /* number of bits */
2025{
2026    Tracevv((stderr," l %2d v %4x ", length, value));
2027    Assert(length > 0 && length <= 15, "invalid length");
2028    s->bits_sent += (ulg)length;
2029
2030    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
2031     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
2032     * unused bits in value.
2033     */
2034    if (s->bi_valid > (int)Buf_size - length) {
2035        s->bi_buf |= (value << s->bi_valid);
2036        put_short(s, s->bi_buf);
2037        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
2038        s->bi_valid += length - Buf_size;
2039    } else {
2040        s->bi_buf |= value << s->bi_valid;
2041        s->bi_valid += length;
2042    }
2043}
2044#else /* !DEBUG_ZLIB */
2045
2046#define send_bits(s, value, length) \
2047{ int len = length;\
2048  if (s->bi_valid > (int)Buf_size - len) {\
2049    int val = value;\
2050    s->bi_buf |= (val << s->bi_valid);\
2051    put_short(s, s->bi_buf);\
2052    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
2053    s->bi_valid += len - Buf_size;\
2054  } else {\
2055    s->bi_buf |= (value) << s->bi_valid;\
2056    s->bi_valid += len;\
2057  }\
2058}
2059#endif /* DEBUG_ZLIB */
2060
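/*
 * Illustrative sketch (not part of zlib): the LSB-first bit buffer discipline
 * behind send_bits().  Bits are appended at the low end; when the 16-bit
 * buffer cannot hold the new value it is emitted low byte first and the
 * overflow bits are kept.  The toy_* names are invented for this example.
 */
#if 0
struct toy_bitwriter {
    unsigned short bit_buf;     /* pending bits, LSB aligned */
    int            bit_valid;   /* number of valid bits in bit_buf */
    unsigned char  out[1024];   /* output bytes */
    unsigned       out_len;
};

/* IN assertion (as for send_bits): length <= 16 and value fits in length bits */
static void toy_send_bits(struct toy_bitwriter *w, int value, int length)
{
    if (w->bit_valid > 16 - length) {
        /* Not enough room: top up the 16-bit buffer and flush it. */
        w->bit_buf |= (unsigned short)((unsigned)value << w->bit_valid);
        w->out[w->out_len++] = (unsigned char)(w->bit_buf & 0xff);
        w->out[w->out_len++] = (unsigned char)(w->bit_buf >> 8);
        w->bit_buf = (unsigned short)((unsigned short)value >> (16 - w->bit_valid));
        w->bit_valid += length - 16;
    } else {
        w->bit_buf |= (unsigned short)((unsigned)value << w->bit_valid);
        w->bit_valid += length;
    }
}
#endif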
2061
2062#define MAX(a,b) (a >= b ? a : b)
2063/* the arguments must not have side effects */
2064
2065/* ===========================================================================
2066 * Initialize the various 'constant' tables. In a multi-threaded environment,
2067 * this function may be called by two threads concurrently, but this is
2068 * harmless since both invocations do exactly the same thing.
2069 */
2070local void tr_static_init()
2071{
2072    static int static_init_done = 0;
2073    int n;        /* iterates over tree elements */
2074    int bits;     /* bit counter */
2075    int length;   /* length value */
2076    int code;     /* code value */
2077    int dist;     /* distance index */
2078    ush bl_count[MAX_BITS+1];
2079    /* number of codes at each bit length for an optimal tree */
2080
2081    if (static_init_done) return;
2082
2083    /* Initialize the mapping length (0..255) -> length code (0..28) */
2084    length = 0;
2085    for (code = 0; code < LENGTH_CODES-1; code++) {
2086        base_length[code] = length;
2087        for (n = 0; n < (1<<extra_lbits[code]); n++) {
2088            length_code[length++] = (uch)code;
2089        }
2090    }
2091    Assert (length == 256, "tr_static_init: length != 256");
2092    /* Note that the length 255 (match length 258) can be represented
2093     * in two different ways: code 284 + 5 bits or code 285, so we
2094     * overwrite length_code[255] to use the best encoding:
2095     */
2096    length_code[length-1] = (uch)code;
2097
2098    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
2099    dist = 0;
2100    for (code = 0 ; code < 16; code++) {
2101        base_dist[code] = dist;
2102        for (n = 0; n < (1<<extra_dbits[code]); n++) {
2103            dist_code[dist++] = (uch)code;
2104        }
2105    }
2106    Assert (dist == 256, "tr_static_init: dist != 256");
2107    dist >>= 7; /* from now on, all distances are divided by 128 */
2108    for ( ; code < D_CODES; code++) {
2109        base_dist[code] = dist << 7;
2110        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
2111            dist_code[256 + dist++] = (uch)code;
2112        }
2113    }
2114    Assert (dist == 256, "tr_static_init: 256+dist != 512");
2115
2116    /* Construct the codes of the static literal tree */
2117    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
2118    n = 0;
2119    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
2120    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
2121    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
2122    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
2123    /* Codes 286 and 287 do not exist, but we must include them in the
2124     * tree construction to get a canonical Huffman tree (longest code
2125     * all ones)
2126     */
2127    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);
2128
2129    /* The static distance tree is trivial: */
2130    for (n = 0; n < D_CODES; n++) {
2131        static_dtree[n].Len = 5;
2132        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
2133    }
2134    static_init_done = 1;
2135}
2136
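/*
 * Illustrative worked example (not part of zlib): the tables built above map
 * a match length of 18 (normalized length 15 = 18 - MIN_MATCH) to length
 * code 11, i.e. literal/length symbol 11 + LITERALS + 1 = 268, with one
 * extra bit of value 15 - base_length[11] = 1, in agreement with the DEFLATE
 * length code table.  The function name is invented for this example.
 */
#if 0
static void toy_length_code_example(void)
{
    int lc     = 18 - MIN_MATCH;          /* normalized match length: 15 */
    int code   = length_code[lc];         /* == 11 */
    int symbol = code + LITERALS + 1;     /* == 268 */
    int extra  = lc - base_length[code];  /* == 1, sent on extra_lbits[11] == 1 bit */
    (void)symbol; (void)extra;
}
#endif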
2137/* ===========================================================================
2138 * Initialize the tree data structures for a new zlib stream.
2139 */
2140void _tr_init(s)
2141    deflate_state *s;
2142{
2143    tr_static_init();
2144
2145    s->compressed_len = 0L;
2146
2147    s->l_desc.dyn_tree = s->dyn_ltree;
2148    s->l_desc.stat_desc = &static_l_desc;
2149
2150    s->d_desc.dyn_tree = s->dyn_dtree;
2151    s->d_desc.stat_desc = &static_d_desc;
2152
2153    s->bl_desc.dyn_tree = s->bl_tree;
2154    s->bl_desc.stat_desc = &static_bl_desc;
2155
2156    s->bi_buf = 0;
2157    s->bi_valid = 0;
2158    s->last_eob_len = 8; /* enough lookahead for inflate */
2159#ifdef DEBUG_ZLIB
2160    s->bits_sent = 0L;
2161#endif
2162
2163    /* Initialize the first block of the first file: */
2164    init_block(s);
2165}
2166
2167/* ===========================================================================
2168 * Initialize a new block.
2169 */
2170local void init_block(s)
2171    deflate_state *s;
2172{
2173    int n; /* iterates over tree elements */
2174
2175    /* Initialize the trees. */
2176    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
2177    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
2178    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
2179
2180    s->dyn_ltree[END_BLOCK].Freq = 1;
2181    s->opt_len = s->static_len = 0L;
2182    s->last_lit = s->matches = 0;
2183}
2184
2185#define SMALLEST 1
2186/* Index within the heap array of least frequent node in the Huffman tree */
2187
2188
2189/* ===========================================================================
2190 * Remove the smallest element from the heap and recreate the heap with
2191 * one less element. Updates heap and heap_len.
2192 */
2193#define pqremove(s, tree, top) \
2194{\
2195    top = s->heap[SMALLEST]; \
2196    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
2197    pqdownheap(s, tree, SMALLEST); \
2198}
2199
2200/* ===========================================================================
2201 * Compares two subtrees, using the tree depth as tie breaker when
2202 * the subtrees have equal frequency. This minimizes the worst case length.
2203 */
2204#define smaller(tree, n, m, depth) \
2205   (tree[n].Freq < tree[m].Freq || \
2206   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
2207
2208/* ===========================================================================
2209 * Restore the heap property by moving down the tree starting at node k,
2210 * exchanging a node with the smallest of its two sons if necessary, stopping
2211 * when the heap property is re-established (each father smaller than its
2212 * two sons).
2213 */
2214local void pqdownheap(s, tree, k)
2215    deflate_state *s;
2216    ct_data *tree;  /* the tree to restore */
2217    int k;               /* node to move down */
2218{
2219    int v = s->heap[k];
2220    int j = k << 1;  /* left son of k */
2221    while (j <= s->heap_len) {
2222        /* Set j to the smallest of the two sons: */
2223        if (j < s->heap_len &&
2224            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
2225            j++;
2226        }
2227        /* Exit if v is smaller than both sons */
2228        if (smaller(tree, v, s->heap[j], s->depth)) break;
2229
2230        /* Exchange v with the smallest son */
2231        s->heap[k] = s->heap[j];  k = j;
2232
2233        /* And continue down the tree, setting j to the left son of k */
2234        j <<= 1;
2235    }
2236    s->heap[k] = v;
2237}
2238
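/*
 * Illustrative sketch (not part of zlib): the same sift-down on a plain
 * min-heap of ints indexed from 1, without the Freq/depth tie break.
 * The toy_* names are invented for this example.
 */
#if 0
static void toy_downheap(int *heap, int heap_len, int k)
{
    int v = heap[k];
    int j = k << 1;                     /* left son of k */
    while (j <= heap_len) {
        /* pick the smaller of the two sons */
        if (j < heap_len && heap[j+1] < heap[j]) j++;
        if (v <= heap[j]) break;        /* heap property restored */
        heap[k] = heap[j];  k = j;      /* move the son up, continue below it */
        j <<= 1;
    }
    heap[k] = v;
}
/* Example: with heap[1..5] = {9, 4, 5, 7, 6}, toy_downheap(heap, 5, 1)
 * yields {4, 6, 5, 7, 9}, re-establishing the min-heap property.
 */
#endif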
2239/* ===========================================================================
2240 * Compute the optimal bit lengths for a tree and update the total bit length
2241 * for the current block.
2242 * IN assertion: the fields freq and dad are set, heap[heap_max] and
2243 *    above are the tree nodes sorted by increasing frequency.
2244 * OUT assertions: the field len is set to the optimal bit length, the
2245 *     array bl_count contains the frequencies for each bit length.
2246 *     The length opt_len is updated; static_len is also updated if stree is
2247 *     not null.
2248 */
2249local void gen_bitlen(s, desc)
2250    deflate_state *s;
2251    tree_desc *desc;    /* the tree descriptor */
2252{
2253    ct_data *tree  = desc->dyn_tree;
2254    int max_code   = desc->max_code;
2255    ct_data *stree = desc->stat_desc->static_tree;
2256    intf *extra    = desc->stat_desc->extra_bits;
2257    int base       = desc->stat_desc->extra_base;
2258    int max_length = desc->stat_desc->max_length;
2259    int h;              /* heap index */
2260    int n, m;           /* iterate over the tree elements */
2261    int bits;           /* bit length */
2262    int xbits;          /* extra bits */
2263    ush f;              /* frequency */
2264    int overflow = 0;   /* number of elements with bit length too large */
2265
2266    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
2267
2268    /* In a first pass, compute the optimal bit lengths (which may
2269     * overflow in the case of the bit length tree).
2270     */
2271    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
2272
2273    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
2274        n = s->heap[h];
2275        bits = tree[tree[n].Dad].Len + 1;
2276        if (bits > max_length) bits = max_length, overflow++;
2277        tree[n].Len = (ush)bits;
2278        /* We overwrite tree[n].Dad which is no longer needed */
2279
2280        if (n > max_code) continue; /* not a leaf node */
2281
2282        s->bl_count[bits]++;
2283        xbits = 0;
2284        if (n >= base) xbits = extra[n-base];
2285        f = tree[n].Freq;
2286        s->opt_len += (ulg)f * (bits + xbits);
2287        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
2288    }
2289    if (overflow == 0) return;
2290
2291    Trace((stderr,"\nbit length overflow\n"));
2292    /* This happens for example on obj2 and pic of the Calgary corpus */
2293
2294    /* Find the first bit length which could increase: */
2295    do {
2296        bits = max_length-1;
2297        while (s->bl_count[bits] == 0) bits--;
2298        s->bl_count[bits]--;      /* move one leaf down the tree */
2299        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
2300        s->bl_count[max_length]--;
2301        /* The brother of the overflow item also moves one step up,
2302         * but this does not affect bl_count[max_length]
2303         */
2304        overflow -= 2;
2305    } while (overflow > 0);
2306
2307    /* Now recompute all bit lengths, scanning in increasing frequency.
2308     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
2309     * lengths instead of fixing only the wrong ones. This idea is taken
2310     * from 'ar' written by Haruhiko Okumura.)
2311     */
2312    for (bits = max_length; bits != 0; bits--) {
2313        n = s->bl_count[bits];
2314        while (n != 0) {
2315            m = s->heap[--h];
2316            if (m > max_code) continue;
2317            if (tree[m].Len != (unsigned) bits) {
2318                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
2319                s->opt_len += ((long)bits - (long)tree[m].Len)
2320                              *(long)tree[m].Freq;
2321                tree[m].Len = (ush)bits;
2322            }
2323            n--;
2324        }
2325    }
2326}
2327
2328/* ===========================================================================
2329 * Generate the codes for a given tree and bit counts (which need not be
2330 * optimal).
2331 * IN assertion: the array bl_count contains the bit length statistics for
2332 * the given tree and the field len is set for all tree elements.
2333 * OUT assertion: the field code is set for all tree elements of non
2334 *     zero code length.
2335 */
2336local void gen_codes (tree, max_code, bl_count)
2337    ct_data *tree;             /* the tree to decorate */
2338    int max_code;              /* largest code with non zero frequency */
2339    ushf *bl_count;            /* number of codes at each bit length */
2340{
2341    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
2342    ush code = 0;              /* running code value */
2343    int bits;                  /* bit index */
2344    int n;                     /* code index */
2345
2346    /* The distribution counts are first used to generate the code values
2347     * without bit reversal.
2348     */
2349    for (bits = 1; bits <= MAX_BITS; bits++) {
2350        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
2351    }
2352    /* Check that the bit counts in bl_count are consistent. The last code
2353     * must be all ones.
2354     */
2355    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
2356            "inconsistent bit counts");
2357    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
2358
2359    for (n = 0;  n <= max_code; n++) {
2360        int len = tree[n].Len;
2361        if (len == 0) continue;
2362        /* Now reverse the bits */
2363        tree[n].Code = bi_reverse(next_code[len]++, len);
2364
2365        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
2366             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
2367    }
2368}
2369
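/*
 * Illustrative sketch (not part of zlib): canonical code assignment as done
 * by gen_codes().  For code lengths {2, 1, 3, 3} the bit counts are
 * bl_count[1]=1, bl_count[2]=1, bl_count[3]=2, giving next_code = {1:0, 2:2,
 * 3:6} and the (unreversed) codes 10, 0, 110, 111.  The toy_* names are
 * invented for this example; lengths are assumed to be <= max_bits <= 15.
 */
#if 0
static void toy_gen_codes(const int *len, unsigned short *code, int n, int max_bits)
{
    unsigned short bl_count[16] = {0};
    unsigned short next_code[16];
    unsigned short c = 0;
    int bits, i;

    for (i = 0; i < n; i++) bl_count[len[i]]++;
    bl_count[0] = 0;                    /* zero-length entries get no code */
    for (bits = 1; bits <= max_bits; bits++) {
        c = (unsigned short)((c + bl_count[bits-1]) << 1);
        next_code[bits] = c;
    }
    for (i = 0; i < n; i++) {
        if (len[i] != 0) code[i] = next_code[len[i]]++;
        /* gen_codes() additionally bit-reverses each code (see bi_reverse) */
    }
}
#endif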
2370/* ===========================================================================
2371 * Construct one Huffman tree and assign the code bit strings and lengths.
2372 * Update the total bit length for the current block.
2373 * IN assertion: the field freq is set for all tree elements.
2374 * OUT assertions: the fields len and code are set to the optimal bit length
2375 *     and corresponding code. The length opt_len is updated; static_len is
2376 *     also updated if stree is not null. The field max_code is set.
2377 */
2378local void build_tree(s, desc)
2379    deflate_state *s;
2380    tree_desc *desc; /* the tree descriptor */
2381{
2382    ct_data *tree   = desc->dyn_tree;
2383    ct_data *stree  = desc->stat_desc->static_tree;
2384    int elems       = desc->stat_desc->elems;
2385    int n, m;          /* iterate over heap elements */
2386    int max_code = -1; /* largest code with non zero frequency */
2387    int node;          /* new node being created */
2388
2389    /* Construct the initial heap, with least frequent element in
2390     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
2391     * heap[0] is not used.
2392     */
2393    s->heap_len = 0, s->heap_max = HEAP_SIZE;
2394
2395    for (n = 0; n < elems; n++) {
2396        if (tree[n].Freq != 0) {
2397            s->heap[++(s->heap_len)] = max_code = n;
2398            s->depth[n] = 0;
2399        } else {
2400            tree[n].Len = 0;
2401        }
2402    }
2403
2404    /* The pkzip format requires that at least one distance code exists,
2405     * and that at least one bit should be sent even if there is only one
2406     * possible code. So to avoid special checks later on we force at least
2407     * two codes of non zero frequency.
2408     */
2409    while (s->heap_len < 2) {
2410        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
2411        tree[node].Freq = 1;
2412        s->depth[node] = 0;
2413        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
2414        /* node is 0 or 1 so it does not have extra bits */
2415    }
2416    desc->max_code = max_code;
2417
2418    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
2419     * establish sub-heaps of increasing lengths:
2420     */
2421    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
2422
2423    /* Construct the Huffman tree by repeatedly combining the least two
2424     * frequent nodes.
2425     */
2426    node = elems;              /* next internal node of the tree */
2427    do {
2428        pqremove(s, tree, n);  /* n = node of least frequency */
2429        m = s->heap[SMALLEST]; /* m = node of next least frequency */
2430
2431        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
2432        s->heap[--(s->heap_max)] = m;
2433
2434        /* Create a new node father of n and m */
2435        tree[node].Freq = tree[n].Freq + tree[m].Freq;
2436        s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
2437        tree[n].Dad = tree[m].Dad = (ush)node;
2438#ifdef DUMP_BL_TREE
2439        if (tree == s->bl_tree) {
2440            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
2441                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
2442        }
2443#endif
2444        /* and insert the new node in the heap */
2445        s->heap[SMALLEST] = node++;
2446        pqdownheap(s, tree, SMALLEST);
2447
2448    } while (s->heap_len >= 2);
2449
2450    s->heap[--(s->heap_max)] = s->heap[SMALLEST];
2451
2452    /* At this point, the fields freq and dad are set. We can now
2453     * generate the bit lengths.
2454     */
2455    gen_bitlen(s, (tree_desc *)desc);
2456
2457    /* The field len is now set, we can generate the bit codes */
2458    gen_codes ((ct_data *)tree, max_code, s->bl_count);
2459}
2460
2461/* ===========================================================================
2462 * Scan a literal or distance tree to determine the frequencies of the codes
2463 * in the bit length tree.
2464 */
2465local void scan_tree (s, tree, max_code)
2466    deflate_state *s;
2467    ct_data *tree;   /* the tree to be scanned */
2468    int max_code;    /* and its largest code of non zero frequency */
2469{
2470    int n;                     /* iterates over all tree elements */
2471    int prevlen = -1;          /* last emitted length */
2472    int curlen;                /* length of current code */
2473    int nextlen = tree[0].Len; /* length of next code */
2474    int count = 0;             /* repeat count of the current code */
2475    int max_count = 7;         /* max repeat count */
2476    int min_count = 4;         /* min repeat count */
2477
2478    if (nextlen == 0) max_count = 138, min_count = 3;
2479    tree[max_code+1].Len = (ush)0xffff; /* guard */
2480
2481    for (n = 0; n <= max_code; n++) {
2482        curlen = nextlen; nextlen = tree[n+1].Len;
2483        if (++count < max_count && curlen == nextlen) {
2484            continue;
2485        } else if (count < min_count) {
2486            s->bl_tree[curlen].Freq += count;
2487        } else if (curlen != 0) {
2488            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
2489            s->bl_tree[REP_3_6].Freq++;
2490        } else if (count <= 10) {
2491            s->bl_tree[REPZ_3_10].Freq++;
2492        } else {
2493            s->bl_tree[REPZ_11_138].Freq++;
2494        }
2495        count = 0; prevlen = curlen;
2496        if (nextlen == 0) {
2497            max_count = 138, min_count = 3;
2498        } else if (curlen == nextlen) {
2499            max_count = 6, min_count = 3;
2500        } else {
2501            max_count = 7, min_count = 4;
2502        }
2503    }
2504}
2505
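/*
 * Illustrative sketch (not part of zlib): how scan_tree()/send_tree() account
 * for a run of zero code lengths.  Runs of 3..10 zeros use REPZ_3_10, runs of
 * 11..138 use REPZ_11_138, longer runs are split into chunks of at most 138,
 * and 1 or 2 leftover zeros are sent as individual length codes.  Runs of a
 * repeated non-zero length use REP_3_6 in a similar way.  The helper below
 * covers only the zero-length case; its name is invented for this example.
 */
#if 0
/* Number of bit length symbols needed for a run of `count' zero lengths. */
static int toy_zero_run_symbols(int count)
{
    int symbols = 0;
    while (count >= 11) {
        int chunk = count > 138 ? 138 : count;
        symbols++;                    /* one REPZ_11_138, extra bits = chunk-11 */
        count -= chunk;
    }
    if (count >= 3) symbols++;        /* one REPZ_3_10, extra bits = count-3 */
    else            symbols += count; /* leftover zeros sent literally */
    return symbols;
}
#endif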
2506/* ===========================================================================
2507 * Send a literal or distance tree in compressed form, using the codes in
2508 * bl_tree.
2509 */
2510local void send_tree (s, tree, max_code)
2511    deflate_state *s;
2512    ct_data *tree; /* the tree to be scanned */
2513    int max_code;       /* and its largest code of non zero frequency */
2514{
2515    int n;                     /* iterates over all tree elements */
2516    int prevlen = -1;          /* last emitted length */
2517    int curlen;                /* length of current code */
2518    int nextlen = tree[0].Len; /* length of next code */
2519    int count = 0;             /* repeat count of the current code */
2520    int max_count = 7;         /* max repeat count */
2521    int min_count = 4;         /* min repeat count */
2522
2523    /* tree[max_code+1].Len = -1; */  /* guard already set */
2524    if (nextlen == 0) max_count = 138, min_count = 3;
2525
2526    for (n = 0; n <= max_code; n++) {
2527        curlen = nextlen; nextlen = tree[n+1].Len;
2528        if (++count < max_count && curlen == nextlen) {
2529            continue;
2530        } else if (count < min_count) {
2531            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
2532
2533        } else if (curlen != 0) {
2534            if (curlen != prevlen) {
2535                send_code(s, curlen, s->bl_tree); count--;
2536            }
2537            Assert(count >= 3 && count <= 6, " 3_6?");
2538            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
2539
2540        } else if (count <= 10) {
2541            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
2542
2543        } else {
2544            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
2545        }
2546        count = 0; prevlen = curlen;
2547        if (nextlen == 0) {
2548            max_count = 138, min_count = 3;
2549        } else if (curlen == nextlen) {
2550            max_count = 6, min_count = 3;
2551        } else {
2552            max_count = 7, min_count = 4;
2553        }
2554    }
2555}
2556
2557/* ===========================================================================
2558 * Construct the Huffman tree for the bit lengths and return the index in
2559 * bl_order of the last bit length code to send.
2560 */
2561local int build_bl_tree(s)
2562    deflate_state *s;
2563{
2564    int max_blindex;  /* index of last bit length code of non zero freq */
2565
2566    /* Determine the bit length frequencies for literal and distance trees */
2567    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2568    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2569
2570    /* Build the bit length tree: */
2571    build_tree(s, (tree_desc *)(&(s->bl_desc)));
2572    /* opt_len now includes the length of the tree representations, except
2573     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
2574     */
2575
2576    /* Determine the number of bit length codes to send. The pkzip format
2577     * requires that at least 4 bit length codes be sent. (appnote.txt says
2578     * 3 but the actual value used is 4.)
2579     */
2580    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2581        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2582    }
2583    /* Update opt_len to include the bit length tree and counts */
2584    s->opt_len += 3*(max_blindex+1) + 5+5+4;
2585    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2586            s->opt_len, s->static_len));
2587
2588    return max_blindex;
2589}
2590
2591/* ===========================================================================
2592 * Send the header for a block using dynamic Huffman trees: the counts, the
2593 * lengths of the bit length codes, the literal tree and the distance tree.
2594 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
2595 */
2596local void send_all_trees(s, lcodes, dcodes, blcodes)
2597    deflate_state *s;
2598    int lcodes, dcodes, blcodes; /* number of codes for each tree */
2599{
2600    int rank;                    /* index in bl_order */
2601
2602    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
2603    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2604            "too many codes");
2605    Tracev((stderr, "\nbl counts: "));
2606    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
2607    send_bits(s, dcodes-1,   5);
2608    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
2609    for (rank = 0; rank < blcodes; rank++) {
2610        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
2611        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
2612    }
2613    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
2614
2615    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
2616    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
2617
2618    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
2619    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
2620}
2621
2622/* ===========================================================================
2623 * Send a stored block
2624 */
2625void _tr_stored_block(s, buf, stored_len, eof)
2626    deflate_state *s;
2627    charf *buf;       /* input block */
2628    ulg stored_len;   /* length of input block */
2629    int eof;          /* true if this is the last block for a file */
2630{
2631    send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
2632    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
2633    s->compressed_len += (stored_len + 4) << 3;
2634
2635    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
2636}
2637
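/*
 * Illustrative sketch (not part of zlib): the byte layout that
 * _tr_stored_block() + copy_block() produce once the three block type bits
 * have been flushed to a byte boundary: LEN and NLEN (its one's complement),
 * both LSB first, followed by the raw bytes.  The toy_* names are invented
 * for this example; len must be <= 0xffff.
 */
#if 0
static unsigned toy_store_block(unsigned char *out, const unsigned char *src,
                                unsigned len)
{
    unsigned nlen = (~len) & 0xffffu;   /* one's complement of LEN */
    unsigned n = 0;
    unsigned i;

    out[n++] = (unsigned char)(len & 0xff);          /* LEN, low byte first */
    out[n++] = (unsigned char)((len >> 8) & 0xff);
    out[n++] = (unsigned char)(nlen & 0xff);         /* NLEN */
    out[n++] = (unsigned char)((nlen >> 8) & 0xff);
    for (i = 0; i < len; i++) out[n++] = src[i];     /* the raw input bytes */
    return n;                                        /* 4 + len bytes total */
}
#endif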
2638/* Send just the `stored block' type code without any length bytes or data.
2639 */
2640void _tr_stored_type_only(s)
2641    deflate_state *s;
2642{
2643    send_bits(s, (STORED_BLOCK << 1), 3);
2644    bi_windup(s);
2645    s->compressed_len = (s->compressed_len + 3) & ~7L;
2646}
2647
2648
2649/* ===========================================================================
2650 * Send one empty static block to give enough lookahead for inflate.
2651 * This takes 10 bits, of which 7 may remain in the bit buffer.
2652 * The current inflate code requires 9 bits of lookahead. If the
2653 * last two codes for the previous block (real code plus EOB) were coded
2654 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
2655 * the last real code. In this case we send two empty static blocks instead
2656 * of one. (There are no problems if the previous block is stored or fixed.)
2657 * To simplify the code, we assume the worst case of last real code encoded
2658 * on one bit only.
2659 */
2660void _tr_align(s)
2661    deflate_state *s;
2662{
2663    send_bits(s, STATIC_TREES<<1, 3);
2664    send_code(s, END_BLOCK, static_ltree);
2665    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
2666    bi_flush(s);
2667    /* Of the 10 bits for the empty block, we have already sent
2668     * (10 - bi_valid) bits. The lookahead for the last real code (before
2669     * the EOB of the previous block) was thus at least one plus the length
2670     * of the EOB plus what we have just sent of the empty static block.
2671     */
2672    if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
2673        send_bits(s, STATIC_TREES<<1, 3);
2674        send_code(s, END_BLOCK, static_ltree);
2675        s->compressed_len += 10L;
2676        bi_flush(s);
2677    }
2678    s->last_eob_len = 7;
2679}
2680
2681/* ===========================================================================
2682 * Determine the best encoding for the current block: dynamic trees, static
2683 * trees or store, and output the encoded block to the zip file. This function
2684 * returns the total compressed length for the file so far.
2685 */
2686ulg _tr_flush_block(s, buf, stored_len, eof)
2687    deflate_state *s;
2688    charf *buf;       /* input block, or NULL if too old */
2689    ulg stored_len;   /* length of input block */
2690    int eof;          /* true if this is the last block for a file */
2691{
2692    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
2693    int max_blindex = 0;  /* index of last bit length code of non zero freq */
2694
2695    /* Build the Huffman trees unless a stored block is forced */
2696    if (s->level > 0) {
2697
2698	 /* Check if the file is ascii or binary */
2699	if (s->data_type == Z_UNKNOWN) set_data_type(s);
2700
2701	/* Construct the literal and distance trees */
2702	build_tree(s, (tree_desc *)(&(s->l_desc)));
2703	Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
2704		s->static_len));
2705
2706	build_tree(s, (tree_desc *)(&(s->d_desc)));
2707	Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
2708		s->static_len));
2709	/* At this point, opt_len and static_len are the total bit lengths of
2710	 * the compressed block data, excluding the tree representations.
2711	 */
2712
2713	/* Build the bit length tree for the above two trees, and get the index
2714	 * in bl_order of the last bit length code to send.
2715	 */
2716	max_blindex = build_bl_tree(s);
2717
2718	/* Determine the best encoding. First compute the block length in bytes. */
2719	opt_lenb = (s->opt_len+3+7)>>3;
2720	static_lenb = (s->static_len+3+7)>>3;
2721
2722	Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
2723		opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
2724		s->last_lit));
2725
2726	if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
2727
2728    } else {
2729        Assert(buf != (char*)0, "lost buf");
2730	opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
2731    }
2732
2733    /* If compression failed and this is the first and last block,
2734     * and if the .zip file is seekable (to rewrite the local header),
2735     * the whole file is transformed into a stored file:
2736     */
2737#ifdef STORED_FILE_OK
2738#  ifdef FORCE_STORED_FILE
2739    if (eof && s->compressed_len == 0L) { /* force stored file */
2740#  else
2741    if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable()) {
2742#  endif
2743        /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
2744        if (buf == (charf*)0) error ("block vanished");
2745
2746        copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
2747        s->compressed_len = stored_len << 3;
2748        s->method = STORED;
2749    } else
2750#endif /* STORED_FILE_OK */
2751
2752#ifdef FORCE_STORED
2753    if (buf != (char*)0) { /* force stored block */
2754#else
2755    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
2756                       /* 4: two words for the lengths */
2757#endif
2758        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
2759         * Otherwise we can't have processed more than WSIZE input bytes since
2760         * the last block flush, because compression would have been
2761         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
2762         * transform a block into a stored block.
2763         */
2764        _tr_stored_block(s, buf, stored_len, eof);
2765
2766#ifdef FORCE_STATIC
2767    } else if (static_lenb >= 0) { /* force static trees */
2768#else
2769    } else if (static_lenb == opt_lenb) {
2770#endif
2771        send_bits(s, (STATIC_TREES<<1)+eof, 3);
2772        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
2773        s->compressed_len += 3 + s->static_len;
2774    } else {
2775        send_bits(s, (DYN_TREES<<1)+eof, 3);
2776        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
2777                       max_blindex+1);
2778        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
2779        s->compressed_len += 3 + s->opt_len;
2780    }
2781    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
2782    init_block(s);
2783
2784    if (eof) {
2785        bi_windup(s);
2786        s->compressed_len += 7;  /* align on byte boundary */
2787    }
2788    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
2789           s->compressed_len-7*eof));
2790
2791    return s->compressed_len >> 3;
2792}
2793
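/*
 * Illustrative sketch (not part of zlib): the three-way choice made by
 * _tr_flush_block() above, ignoring the FORCE_* debug options.  The enum and
 * function names are invented for this example.
 */
#if 0
enum toy_block_type { TOY_STORED, TOY_STATIC, TOY_DYNAMIC };

static enum toy_block_type toy_choose_block(ulg opt_lenb, ulg static_lenb,
                                            ulg stored_len, int buf_available)
{
    if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    /* 4 = the two 16-bit words (LEN, NLEN) of a stored block header */
    if (stored_len + 4 <= opt_lenb && buf_available) return TOY_STORED;
    if (static_lenb == opt_lenb)                     return TOY_STATIC;
    return TOY_DYNAMIC;
}
#endif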
2794/* ===========================================================================
2795 * Save the match info and tally the frequency counts. Return true if
2796 * the current block must be flushed.
2797 */
2798int _tr_tally (s, dist, lc)
2799    deflate_state *s;
2800    unsigned dist;  /* distance of matched string */
2801    unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
2802{
2803    s->d_buf[s->last_lit] = (ush)dist;
2804    s->l_buf[s->last_lit++] = (uch)lc;
2805    if (dist == 0) {
2806        /* lc is the unmatched char */
2807        s->dyn_ltree[lc].Freq++;
2808    } else {
2809        s->matches++;
2810        /* Here, lc is the match length - MIN_MATCH */
2811        dist--;             /* dist = match distance - 1 */
2812        Assert((ush)dist < (ush)MAX_DIST(s) &&
2813               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
2814               (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");
2815
2816        s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
2817        s->dyn_dtree[d_code(dist)].Freq++;
2818    }
2819
2820    /* Try to guess if it is profitable to stop the current block here */
2821    if (s->level > 2 && (s->last_lit & 0xfff) == 0) {
2822        /* Compute an upper bound for the compressed length */
2823        ulg out_length = (ulg)s->last_lit*8L;
2824        ulg in_length = (ulg)((long)s->strstart - s->block_start);
2825        int dcode;
2826        for (dcode = 0; dcode < D_CODES; dcode++) {
2827            out_length += (ulg)s->dyn_dtree[dcode].Freq *
2828                (5L+extra_dbits[dcode]);
2829        }
2830        out_length >>= 3;
2831        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
2832               s->last_lit, in_length, out_length,
2833               100L - out_length*100L/in_length));
2834        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
2835    }
2836    return (s->last_lit == s->lit_bufsize-1);
2837    /* We avoid equality with lit_bufsize because of wraparound at 64K
2838     * on 16 bit machines and because stored blocks are restricted to
2839     * 64K-1 bytes.
2840     */
2841}
2842
2843/* ===========================================================================
2844 * Send the block data compressed using the given Huffman trees
2845 */
2846local void compress_block(s, ltree, dtree)
2847    deflate_state *s;
2848    ct_data *ltree; /* literal tree */
2849    ct_data *dtree; /* distance tree */
2850{
2851    unsigned dist;      /* distance of matched string */
2852    int lc;             /* match length or unmatched char (if dist == 0) */
2853    unsigned lx = 0;    /* running index in l_buf */
2854    unsigned code;      /* the code to send */
2855    int extra;          /* number of extra bits to send */
2856
2857    if (s->last_lit != 0) do {
2858        dist = s->d_buf[lx];
2859        lc = s->l_buf[lx++];
2860        if (dist == 0) {
2861            send_code(s, lc, ltree); /* send a literal byte */
2862            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
2863        } else {
2864            /* Here, lc is the match length - MIN_MATCH */
2865            code = length_code[lc];
2866            send_code(s, code+LITERALS+1, ltree); /* send the length code */
2867            extra = extra_lbits[code];
2868            if (extra != 0) {
2869                lc -= base_length[code];
2870                send_bits(s, lc, extra);       /* send the extra length bits */
2871            }
2872            dist--; /* dist is now the match distance - 1 */
2873            code = d_code(dist);
2874            Assert (code < D_CODES, "bad d_code");
2875
2876            send_code(s, code, dtree);       /* send the distance code */
2877            extra = extra_dbits[code];
2878            if (extra != 0) {
2879                dist -= base_dist[code];
2880                send_bits(s, dist, extra);   /* send the extra distance bits */
2881            }
2882        } /* literal or match pair ? */
2883
2884        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
2885        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
2886
2887    } while (lx < s->last_lit);
2888
2889    send_code(s, END_BLOCK, ltree);
2890    s->last_eob_len = ltree[END_BLOCK].Len;
2891}
2892
2893/* ===========================================================================
2894 * Set the data type to ASCII or BINARY, using a crude approximation:
2895 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
2896 * IN assertion: the fields freq of dyn_ltree are set and the total of all
2897 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
2898 */
2899local void set_data_type(s)
2900    deflate_state *s;
2901{
2902    int n = 0;
2903    unsigned ascii_freq = 0;
2904    unsigned bin_freq = 0;
2905    while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
2906    while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
2907    while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
2908    s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
2909}
2910
2911/* ===========================================================================
2912 * Reverse the first len bits of a code, using straightforward code (a faster
2913 * method would use a table)
2914 * IN assertion: 1 <= len <= 15
2915 */
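/* Example: bi_reverse(0xB, 4) returns 0xD -- the bit pattern 1011 read in
 * reverse order is 1101.
 */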
2916local unsigned bi_reverse(code, len)
2917    unsigned code; /* the value to invert */
2918    int len;       /* its bit length */
2919{
2920    register unsigned res = 0;
2921    do {
2922        res |= code & 1;
2923        code >>= 1, res <<= 1;
2924    } while (--len > 0);
2925    return res >> 1;
2926}
2927
2928/* ===========================================================================
2929 * Flush the bit buffer, keeping at most 7 bits in it.
2930 */
2931local void bi_flush(s)
2932    deflate_state *s;
2933{
2934    if (s->bi_valid == 16) {
2935        put_short(s, s->bi_buf);
2936        s->bi_buf = 0;
2937        s->bi_valid = 0;
2938    } else if (s->bi_valid >= 8) {
2939        put_byte(s, (Byte)s->bi_buf);
2940        s->bi_buf >>= 8;
2941        s->bi_valid -= 8;
2942    }
2943}
2944
2945/* ===========================================================================
2946 * Flush the bit buffer and align the output on a byte boundary
2947 */
2948local void bi_windup(s)
2949    deflate_state *s;
2950{
2951    if (s->bi_valid > 8) {
2952        put_short(s, s->bi_buf);
2953    } else if (s->bi_valid > 0) {
2954        put_byte(s, (Byte)s->bi_buf);
2955    }
2956    s->bi_buf = 0;
2957    s->bi_valid = 0;
2958#ifdef DEBUG_ZLIB
2959    s->bits_sent = (s->bits_sent+7) & ~7;
2960#endif
2961}
2962
2963/* ===========================================================================
2964 * Copy a stored block, storing first the length and its
2965 * one's complement if requested.
2966 */
2967local void copy_block(s, buf, len, header)
2968    deflate_state *s;
2969    charf    *buf;    /* the input data */
2970    unsigned len;     /* its length */
2971    int      header;  /* true if block header must be written */
2972{
2973    bi_windup(s);        /* align on byte boundary */
2974    s->last_eob_len = 8; /* enough lookahead for inflate */
2975
2976    if (header) {
2977        put_short(s, (ush)len);
2978        put_short(s, (ush)~len);
2979#ifdef DEBUG_ZLIB
2980        s->bits_sent += 2*16;
2981#endif
2982    }
2983#ifdef DEBUG_ZLIB
2984    s->bits_sent += (ulg)len<<3;
2985#endif
2986    /* bundle up the put_byte(s, *buf++) calls */
2987    zmemcpy(&s->pending_buf[s->pending], buf, len);
2988    s->pending += len;
2989}
2990/* --- trees.c */
2991
2992/* +++ inflate.c */
2993/* inflate.c -- zlib interface to inflate modules
2994 * Copyright (C) 1995-1996 Mark Adler
2995 * For conditions of distribution and use, see copyright notice in zlib.h
2996 */
2997
2998/* #include "zutil.h" */
2999
3000/* +++ infblock.h */
3001/* infblock.h -- header to use infblock.c
3002 * Copyright (C) 1995-1996 Mark Adler
3003 * For conditions of distribution and use, see copyright notice in zlib.h
3004 */
3005
3006/* WARNING: this file should *not* be used by applications. It is
3007   part of the implementation of the compression library and is
3008   subject to change. Applications should only use zlib.h.
3009 */
3010
3011struct inflate_blocks_state;
3012typedef struct inflate_blocks_state FAR inflate_blocks_statef;
3013
3014extern inflate_blocks_statef * inflate_blocks_new OF((
3015    z_streamp z,
3016    check_func c,               /* check function */
3017    uInt w));                   /* window size */
3018
3019extern int inflate_blocks OF((
3020    inflate_blocks_statef *,
3021    z_streamp ,
3022    int));                      /* initial return code */
3023
3024extern void inflate_blocks_reset OF((
3025    inflate_blocks_statef *,
3026    z_streamp ,
3027    uLongf *));                  /* check value on output */
3028
3029extern int inflate_blocks_free OF((
3030    inflate_blocks_statef *,
3031    z_streamp ,
3032    uLongf *));                  /* check value on output */
3033
3034extern void inflate_set_dictionary OF((
3035    inflate_blocks_statef *s,
3036    const Bytef *d,  /* dictionary */
3037    uInt  n));       /* dictionary length */
3038
3039extern int inflate_addhistory OF((
3040    inflate_blocks_statef *,
3041    z_streamp));
3042
3043extern int inflate_packet_flush OF((
3044    inflate_blocks_statef *));
3045/* --- infblock.h */
3046
3047#ifndef NO_DUMMY_DECL
3048struct inflate_blocks_state {int dummy;}; /* for buggy compilers */
3049#endif
3050
3051/* inflate private state */
3052struct internal_state {
3053
3054  /* mode */
3055  enum {
3056      METHOD,   /* waiting for method byte */
3057      FLAG,     /* waiting for flag byte */
3058      DICT4,    /* four dictionary check bytes to go */
3059      DICT3,    /* three dictionary check bytes to go */
3060      DICT2,    /* two dictionary check bytes to go */
3061      DICT1,    /* one dictionary check byte to go */
3062      DICT0,    /* waiting for inflateSetDictionary */
3063      BLOCKS,   /* decompressing blocks */
3064      CHECK4,   /* four check bytes to go */
3065      CHECK3,   /* three check bytes to go */
3066      CHECK2,   /* two check bytes to go */
3067      CHECK1,   /* one check byte to go */
3068      DONE,     /* finished check, done */
3069      BAD}      /* got an error--stay here */
3070    mode;               /* current inflate mode */
3071
3072  /* mode dependent information */
3073  union {
3074    uInt method;        /* if FLAGS, method byte */
3075    struct {
3076      uLong was;                /* computed check value */
3077      uLong need;               /* stream check value */
3078    } check;            /* if CHECK, check values to compare */
3079    uInt marker;        /* if BAD, inflateSync's marker bytes count */
3080  } sub;        /* submode */
3081
3082  /* mode independent information */
3083  int  nowrap;          /* flag for no wrapper */
3084  uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
3085  inflate_blocks_statef
3086    *blocks;            /* current inflate_blocks state */
3087
3088};
3089
3090
3091int inflateReset(z)
3092z_streamp z;
3093{
3094  uLong c;
3095
3096  if (z == Z_NULL || z->state == Z_NULL)
3097    return Z_STREAM_ERROR;
3098  z->total_in = z->total_out = 0;
3099  z->msg = Z_NULL;
3100  z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
3101  inflate_blocks_reset(z->state->blocks, z, &c);
3102  Trace((stderr, "inflate: reset\n"));
3103  return Z_OK;
3104}
3105
3106
3107int inflateEnd(z)
3108z_streamp z;
3109{
3110  uLong c;
3111
3112  if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
3113    return Z_STREAM_ERROR;
3114  if (z->state->blocks != Z_NULL)
3115    inflate_blocks_free(z->state->blocks, z, &c);
3116  ZFREE(z, z->state);
3117  z->state = Z_NULL;
3118  Trace((stderr, "inflate: end\n"));
3119  return Z_OK;
3120}
3121
3122
3123int inflateInit2_(z, w, version, stream_size)
3124z_streamp z;
3125int w;
3126const char *version;
3127int stream_size;
3128{
3129  if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
3130      stream_size != sizeof(z_stream))
3131      return Z_VERSION_ERROR;
3132
3133  /* initialize state */
3134  if (z == Z_NULL)
3135    return Z_STREAM_ERROR;
3136  z->msg = Z_NULL;
3137#ifndef NO_ZCFUNCS
3138  if (z->zalloc == Z_NULL)
3139  {
3140    z->zalloc = zcalloc;
3141    z->opaque = (voidpf)0;
3142  }
3143  if (z->zfree == Z_NULL) z->zfree = zcfree;
3144#endif
3145  if ((z->state = (struct internal_state FAR *)
3146       ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)
3147    return Z_MEM_ERROR;
3148  z->state->blocks = Z_NULL;
3149
3150  /* handle undocumented nowrap option (no zlib header or check) */
3151  z->state->nowrap = 0;
3152  if (w < 0)
3153  {
3154    w = - w;
3155    z->state->nowrap = 1;
3156  }
3157
3158  /* set window size */
3159  if (w < 8 || w > 15)
3160  {
3161    inflateEnd(z);
3162    return Z_STREAM_ERROR;
3163  }
3164  z->state->wbits = (uInt)w;
3165
3166  /* create inflate_blocks state */
3167  if ((z->state->blocks =
3168      inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, (uInt)1 << w))
3169      == Z_NULL)
3170  {
3171    inflateEnd(z);
3172    return Z_MEM_ERROR;
3173  }
3174  Trace((stderr, "inflate: allocated\n"));
3175
3176  /* reset state */
3177  inflateReset(z);
3178  return Z_OK;
3179}
3180
3181
3182int inflateInit_(z, version, stream_size)
3183z_streamp z;
3184const char *version;
3185int stream_size;
3186{
3187  return inflateInit2_(z, DEF_WBITS, version, stream_size);
3188}
3189
3190
3191#define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
3192#define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
3193
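/*
 * A zlib stream begins with two header bytes, CMF and FLG (RFC 1950): the
 * low nibble of CMF must be Z_DEFLATED (8), the high nibble is log2(window
 * size) - 8, and the 16-bit value CMF*256 + FLG must be divisible by 31.
 * For instance, the common header 0x78 0x9C passes all three checks
 * (method 8, 32K window, 0x789C == 31 * 996); the METHOD and FLAG states
 * below verify exactly these conditions, one byte at a time.
 */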
3194int inflate(z, f)
3195z_streamp z;
3196int f;
3197{
3198  int r;
3199  uInt b;
3200
3201  if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL || f < 0)
3202    return Z_STREAM_ERROR;
3203  r = Z_BUF_ERROR;
3204  while (1) switch (z->state->mode)
3205  {
3206    case METHOD:
3207      NEEDBYTE
3208      if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)
3209      {
3210        z->state->mode = BAD;
3211        z->msg = (char*)"unknown compression method";
3212        z->state->sub.marker = 5;       /* can't try inflateSync */
3213        break;
3214      }
3215      if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
3216      {
3217        z->state->mode = BAD;
3218        z->msg = (char*)"invalid window size";
3219        z->state->sub.marker = 5;       /* can't try inflateSync */
3220        break;
3221      }
3222      z->state->mode = FLAG;
3223    case FLAG:
3224      NEEDBYTE
3225      b = NEXTBYTE;
3226      if (((z->state->sub.method << 8) + b) % 31)
3227      {
3228        z->state->mode = BAD;
3229        z->msg = (char*)"incorrect header check";
3230        z->state->sub.marker = 5;       /* can't try inflateSync */
3231        break;
3232      }
3233      Trace((stderr, "inflate: zlib header ok\n"));
3234      if (!(b & PRESET_DICT))
3235      {
3236        z->state->mode = BLOCKS;
3237	break;
3238      }
3239      z->state->mode = DICT4;
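    /* when the PRESET_DICT flag is set, the next four bytes are the Adler-32
       of the dictionary used by the compressor; inflate returns Z_NEED_DICT
       so the caller can supply it through inflateSetDictionary() */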
3240    case DICT4:
3241      NEEDBYTE
3242      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
3243      z->state->mode = DICT3;
3244    case DICT3:
3245      NEEDBYTE
3246      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
3247      z->state->mode = DICT2;
3248    case DICT2:
3249      NEEDBYTE
3250      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
3251      z->state->mode = DICT1;
3252    case DICT1:
3253      NEEDBYTE
3254      z->state->sub.check.need += (uLong)NEXTBYTE;
3255      z->adler = z->state->sub.check.need;
3256      z->state->mode = DICT0;
3257      return Z_NEED_DICT;
3258    case DICT0:
3259      z->state->mode = BAD;
3260      z->msg = (char*)"need dictionary";
3261      z->state->sub.marker = 0;       /* can try inflateSync */
3262      return Z_STREAM_ERROR;
3263    case BLOCKS:
3264      r = inflate_blocks(z->state->blocks, z, r);
3265      if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
3266	  r = inflate_packet_flush(z->state->blocks);
3267      if (r == Z_DATA_ERROR)
3268      {
3269        z->state->mode = BAD;
3270        z->state->sub.marker = 0;       /* can try inflateSync */
3271        break;
3272      }
3273      if (r != Z_STREAM_END)
3274        return r;
3275      r = Z_OK;
3276      inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
3277      if (z->state->nowrap)
3278      {
3279        z->state->mode = DONE;
3280        break;
3281      }
3282      z->state->mode = CHECK4;
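    /* the four CHECK states read the big-endian Adler-32 value stored at
       the end of the zlib stream and compare it with the computed check */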
3283    case CHECK4:
3284      NEEDBYTE
3285      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
3286      z->state->mode = CHECK3;
3287    case CHECK3:
3288      NEEDBYTE
3289      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
3290      z->state->mode = CHECK2;
3291    case CHECK2:
3292      NEEDBYTE
3293      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
3294      z->state->mode = CHECK1;
3295    case CHECK1:
3296      NEEDBYTE
3297      z->state->sub.check.need += (uLong)NEXTBYTE;
3298
3299      if (z->state->sub.check.was != z->state->sub.check.need)
3300      {
3301        z->state->mode = BAD;
3302        z->msg = (char*)"incorrect data check";
3303        z->state->sub.marker = 5;       /* can't try inflateSync */
3304        break;
3305      }
3306      Trace((stderr, "inflate: zlib check ok\n"));
3307      z->state->mode = DONE;
3308    case DONE:
3309      return Z_STREAM_END;
3310    case BAD:
3311      return Z_DATA_ERROR;
3312    default:
3313      return Z_STREAM_ERROR;
3314  }
3315
3316 empty:
3317  if (f != Z_PACKET_FLUSH)
3318    return r;
3319  z->state->mode = BAD;
3320  z->msg = (char *)"need more for packet flush";
3321  z->state->sub.marker = 0;       /* can try inflateSync */
3322  return Z_DATA_ERROR;
3323}
3324
3325
3326int inflateSetDictionary(z, dictionary, dictLength)
3327z_streamp z;
3328const Bytef *dictionary;
3329uInt  dictLength;
3330{
3331  uInt length = dictLength;
3332
3333  if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0)
3334    return Z_STREAM_ERROR;
3335
3336  if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;
3337  z->adler = 1L;
3338
3339  if (length >= ((uInt)1<<z->state->wbits))
3340  {
3341    length = (1<<z->state->wbits)-1;
3342    dictionary += dictLength - length;
3343  }
3344  inflate_set_dictionary(z->state->blocks, dictionary, length);
3345  z->state->mode = BLOCKS;
3346  return Z_OK;
3347}
3348
3349/*
3350 * This subroutine adds the data at next_in/avail_in to the output history
3351 * without performing any output.  The output buffer must be "caught up";
3352 * i.e. no pending output (hence s->read equals s->write), and the state must
3353 * be BLOCKS (i.e. we should be willing to see the start of a series of
3354 * BLOCKS).  On exit, the output will also be caught up, and the checksum
3355 * will have been updated if need be.
3356 */
3357
3358int inflateIncomp(z)
3359z_stream *z;
3360{
3361    if (z->state->mode != BLOCKS)
3362	return Z_DATA_ERROR;
3363    return inflate_addhistory(z->state->blocks, z);
3364}
3365
3366
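/*
 * inflateSync scans the remaining input for the byte sequence 00 00 FF FF,
 * the length/complement pair of an empty stored block (as emitted by a full
 * flush), and resets the stream so decompression can resume at the block
 * that follows the marker.
 */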
3367int inflateSync(z)
3368z_streamp z;
3369{
3370  uInt n;       /* number of bytes to look at */
3371  Bytef *p;     /* pointer to bytes */
3372  uInt m;       /* number of marker bytes found in a row */
3373  uLong r, w;   /* temporaries to save total_in and total_out */
3374
3375  /* set up */
3376  if (z == Z_NULL || z->state == Z_NULL)
3377    return Z_STREAM_ERROR;
3378  if (z->state->mode != BAD)
3379  {
3380    z->state->mode = BAD;
3381    z->state->sub.marker = 0;
3382  }
3383  if ((n = z->avail_in) == 0)
3384    return Z_BUF_ERROR;
3385  p = z->next_in;
3386  m = z->state->sub.marker;
3387
3388  /* search */
3389  while (n && m < 4)
3390  {
3391    if (*p == (Byte)(m < 2 ? 0 : 0xff))
3392      m++;
3393    else if (*p)
3394      m = 0;
3395    else
3396      m = 4 - m;
3397    p++, n--;
3398  }
3399
3400  /* restore */
3401  z->total_in += p - z->next_in;
3402  z->next_in = p;
3403  z->avail_in = n;
3404  z->state->sub.marker = m;
3405
3406  /* return no joy or set up to restart on a new block */
3407  if (m != 4)
3408    return Z_DATA_ERROR;
3409  r = z->total_in;  w = z->total_out;
3410  inflateReset(z);
3411  z->total_in = r;  z->total_out = w;
3412  z->state->mode = BLOCKS;
3413  return Z_OK;
3414}
3415
3416#undef NEEDBYTE
3417#undef NEXTBYTE
3418/* --- inflate.c */
3419
3420/* +++ infblock.c */
3421/* infblock.c -- interpret and process block types to last block
3422 * Copyright (C) 1995-1996 Mark Adler
3423 * For conditions of distribution and use, see copyright notice in zlib.h
3424 */
3425
3426/* #include "zutil.h" */
3427/* #include "infblock.h" */
3428
3429/* +++ inftrees.h */
3430/* inftrees.h -- header to use inftrees.c
3431 * Copyright (C) 1995-1996 Mark Adler
3432 * For conditions of distribution and use, see copyright notice in zlib.h
3433 */
3434
3435/* WARNING: this file should *not* be used by applications. It is
3436   part of the implementation of the compression library and is
3437   subject to change. Applications should only use zlib.h.
3438 */
3439
3440/* Huffman code lookup table entry--this entry is four bytes for machines
3441   that have 16-bit pointers (e.g. PC's in the small or medium model). */
3442
3443typedef struct inflate_huft_s FAR inflate_huft;
3444
3445struct inflate_huft_s {
3446  union {
3447    struct {
3448      Byte Exop;        /* number of extra bits or operation */
3449      Byte Bits;        /* number of bits in this code or subcode */
3450    } what;
3451    Bytef *pad;         /* pad structure to a power of 2 (4 bytes for */
3452  } word;               /*  16-bit, 8 bytes for 32-bit machines) */
3453  union {
3454    uInt Base;          /* literal, length base, or distance base */
3455    inflate_huft *Next; /* pointer to next level of table */
3456  } more;
3457};
3458
3459#ifdef DEBUG_ZLIB
3460  extern uInt inflate_hufts;
3461#endif
3462
3463extern int inflate_trees_bits OF((
3464    uIntf *,                    /* 19 code lengths */
3465    uIntf *,                    /* bits tree desired/actual depth */
3466    inflate_huft * FAR *,       /* bits tree result */
3467    z_streamp ));               /* for zalloc, zfree functions */
3468
3469extern int inflate_trees_dynamic OF((
3470    uInt,                       /* number of literal/length codes */
3471    uInt,                       /* number of distance codes */
3472    uIntf *,                    /* that many (total) code lengths */
3473    uIntf *,                    /* literal desired/actual bit depth */
3474    uIntf *,                    /* distance desired/actual bit depth */
3475    inflate_huft * FAR *,       /* literal/length tree result */
3476    inflate_huft * FAR *,       /* distance tree result */
3477    z_streamp ));               /* for zalloc, zfree functions */
3478
3479extern int inflate_trees_fixed OF((
3480    uIntf *,                    /* literal desired/actual bit depth */
3481    uIntf *,                    /* distance desired/actual bit depth */
3482    inflate_huft * FAR *,       /* literal/length tree result */
3483    inflate_huft * FAR *));     /* distance tree result */
3484
3485extern int inflate_trees_free OF((
3486    inflate_huft *,             /* tables to free */
3487    z_streamp ));               /* for zfree function */
3488
3489/* --- inftrees.h */
3490
3491/* +++ infcodes.h */
3492/* infcodes.h -- header to use infcodes.c
3493 * Copyright (C) 1995-1996 Mark Adler
3494 * For conditions of distribution and use, see copyright notice in zlib.h
3495 */
3496
3497/* WARNING: this file should *not* be used by applications. It is
3498   part of the implementation of the compression library and is
3499   subject to change. Applications should only use zlib.h.
3500 */
3501
3502struct inflate_codes_state;
3503typedef struct inflate_codes_state FAR inflate_codes_statef;
3504
3505extern inflate_codes_statef *inflate_codes_new OF((
3506    uInt, uInt,
3507    inflate_huft *, inflate_huft *,
3508    z_streamp ));
3509
3510extern int inflate_codes OF((
3511    inflate_blocks_statef *,
3512    z_streamp ,
3513    int));
3514
3515extern void inflate_codes_free OF((
3516    inflate_codes_statef *,
3517    z_streamp ));
3518
3519/* --- infcodes.h */
3520
3521/* +++ infutil.h */
3522/* infutil.h -- types and macros common to blocks and codes
3523 * Copyright (C) 1995-1996 Mark Adler
3524 * For conditions of distribution and use, see copyright notice in zlib.h
3525 */
3526
3527/* WARNING: this file should *not* be used by applications. It is
3528   part of the implementation of the compression library and is
3529   subject to change. Applications should only use zlib.h.
3530 */
3531
3532#ifndef _INFUTIL_H
3533#define _INFUTIL_H
3534
3535typedef enum {
3536      TYPE,     /* get type bits (3, including end bit) */
3537      LENS,     /* get lengths for stored */
3538      STORED,   /* processing stored block */
3539      TABLE,    /* get table lengths */
3540      BTREE,    /* get bit lengths tree for a dynamic block */
3541      DTREE,    /* get length, distance trees for a dynamic block */
3542      CODES,    /* processing fixed or dynamic block */
3543      DRY,      /* output remaining window bytes */
3544      DONEB,    /* finished last block, done */
3545      BADB}     /* got a data error--stuck here */
3546inflate_block_mode;
3547
3548/* inflate blocks semi-private state */
3549struct inflate_blocks_state {
3550
3551  /* mode */
3552  inflate_block_mode  mode;     /* current inflate_block mode */
3553
3554  /* mode dependent information */
3555  union {
3556    uInt left;          /* if STORED, bytes left to copy */
3557    struct {
3558      uInt table;               /* table lengths (14 bits) */
3559      uInt index;               /* index into blens (or border) */
3560      uIntf *blens;             /* bit lengths of codes */
3561      uInt bb;                  /* bit length tree depth */
3562      inflate_huft *tb;         /* bit length decoding tree */
3563    } trees;            /* if DTREE, decoding info for trees */
3564    struct {
3565      inflate_huft *tl;
3566      inflate_huft *td;         /* trees to free */
3567      inflate_codes_statef
3568         *codes;
3569    } decode;           /* if CODES, current state */
3570  } sub;                /* submode */
3571  uInt last;            /* true if this block is the last block */
3572
3573  /* mode independent information */
3574  uInt bitk;            /* bits in bit buffer */
3575  uLong bitb;           /* bit buffer */
3576  Bytef *window;        /* sliding window */
3577  Bytef *end;           /* one byte after sliding window */
3578  Bytef *read;          /* window read pointer */
3579  Bytef *write;         /* window write pointer */
3580  check_func checkfn;   /* check function */
3581  uLong check;          /* check on output */
3582
3583};
3584
3585
3586/* defines for inflate input/output */
3587/*   update pointers and return */
3588#define UPDBITS {s->bitb=b;s->bitk=k;}
3589#define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
3590#define UPDOUT {s->write=q;}
3591#define UPDATE {UPDBITS UPDIN UPDOUT}
3592#define LEAVE {UPDATE return inflate_flush(s,z,r);}
3593/*   get bytes and bits */
3594#define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
3595#define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
3596#define NEXTBYTE (n--,*p++)
3597#define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
3598#define DUMPBITS(j) {b>>=(j);k-=(j);}
3599/*   output bytes */
3600#define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q)
3601#define LOADOUT {q=s->write;m=(uInt)WAVAIL;}
3602#define WWRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
3603#define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
3604#define NEEDOUT {if(m==0){WWRAP if(m==0){FLUSH WWRAP if(m==0) LEAVE}}r=Z_OK;}
3605#define OUTBYTE(a) {*q++=(Byte)(a);m--;}
3606/*   load local pointers */
3607#define LOAD {LOADIN LOADOUT}
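/* The window is used as a circular buffer: q is the write pointer, s->read
   the read pointer, and WAVAIL is the contiguous space that can be written
   before reaching either the read pointer or the end of the window. */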
3608
3609/* masks for lower bits (size given to avoid silly warnings with Visual C++) */
3610extern uInt inflate_mask[17];
3611
3612/* copy as much as possible from the sliding window to the output area */
3613extern int inflate_flush OF((
3614    inflate_blocks_statef *,
3615    z_streamp ,
3616    int));
3617
3618#ifndef NO_DUMMY_DECL
3619struct internal_state      {int dummy;}; /* for buggy compilers */
3620#endif
3621
3622#endif
3623/* --- infutil.h */
3624
3625#ifndef NO_DUMMY_DECL
3626struct inflate_codes_state {int dummy;}; /* for buggy compilers */
3627#endif
3628
3629/* Table for deflate from PKZIP's appnote.txt. */
3630local const uInt border[] = { /* Order of the bit length code lengths */
3631        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
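/* The code-length code lengths are transmitted in this permuted order so
   that the symbols most likely to be used come first and unused trailing
   entries need not be sent at all. */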
3632
3633/*
3634   Notes beyond the 1.93a appnote.txt:
3635
3636   1. Distance pointers never point before the beginning of the output
3637      stream.
3638   2. Distance pointers can point back across blocks, up to 32k away.
3639   3. There is an implied maximum of 7 bits for the bit length table and
3640      15 bits for the actual data.
3641   4. If only one code exists, then it is encoded using one bit.  (Zero
3642      would be more efficient, but perhaps a little confusing.)  If two
3643      codes exist, they are coded using one bit each (0 and 1).
3644   5. There is no way of sending zero distance codes--a dummy must be
3645      sent if there are none.  (History: a pre 2.0 version of PKZIP would
3646      store blocks with no distance codes, but this was discovered to be
3647      too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
3648      zero distance codes, which is sent as one code of zero bits in
3649      length.
3650   6. There are up to 286 literal/length codes.  Code 256 represents the
3651      end-of-block.  Note however that the static length tree defines
3652      288 codes just to fill out the Huffman codes.  Codes 286 and 287
3653      cannot be used though, since there is no length base or extra bits
3654      defined for them.  Similarly, there are up to 30 distance codes.
3655      However, static trees define 32 codes (all 5 bits) to fill out the
3656      Huffman codes, but the last two had better not show up in the data.
3657   7. Unzip can check dynamic Huffman blocks for complete code sets.
3658      The exception is that a single code would not be complete (see #4).
3659   8. The five bits following the block type are really the number of
3660      literal codes sent minus 257.
3661   9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
3662      (1+6+6).  Therefore, to output three times the length, you output
3663      three codes (1+1+1), whereas to output four times the same length,
3664      you only need two codes (1+3).  Hmm.
3665  10. In the tree reconstruction algorithm, Code = Code + Increment
3666      only if BitLength(i) is not zero.  (Pretty obvious.)
3667  11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
3668  12. Note: length code 284 can represent 227-258, but length code 285
3669      really is 258.  The last length deserves its own, short code
3670      since it gets used a lot in very redundant files.  The length
3671      258 is special since 258 - 3 (the min match length) is 255.
3672  13. The literal/length and distance code bit lengths are read as a
3673      single stream of lengths.  It is possible (and advantageous) for
3674      a repeat code (16, 17, or 18) to go across the boundary between
3675      the two sets of lengths.
3676 */
3677
3678
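/* Return the running check value through *c (when a check function is in
   use), free any allocations tied to the current mode, and put the blocks
   state back to the start-of-block (TYPE) state with an empty bit buffer
   and an empty window. */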
3679void inflate_blocks_reset(s, z, c)
3680inflate_blocks_statef *s;
3681z_streamp z;
3682uLongf *c;
3683{
3684  if (s->checkfn != Z_NULL)
3685    *c = s->check;
3686  if (s->mode == BTREE || s->mode == DTREE)
3687    ZFREE(z, s->sub.trees.blens);
3688  if (s->mode == CODES)
3689  {
3690    inflate_codes_free(s->sub.decode.codes, z);
3691    inflate_trees_free(s->sub.decode.td, z);
3692    inflate_trees_free(s->sub.decode.tl, z);
3693  }
3694  s->mode = TYPE;
3695  s->bitk = 0;
3696  s->bitb = 0;
3697  s->read = s->write = s->window;
3698  if (s->checkfn != Z_NULL)
3699    z->adler = s->check = (*s->checkfn)(0L, Z_NULL, 0);
3700  Trace((stderr, "inflate:   blocks reset\n"));
3701}
3702
3703
3704inflate_blocks_statef *inflate_blocks_new(z, c, w)
3705z_streamp z;
3706check_func c;
3707uInt w;
3708{
3709  inflate_blocks_statef *s;
3710
3711  if ((s = (inflate_blocks_statef *)ZALLOC
3712       (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
3713    return s;
3714  if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
3715  {
3716    ZFREE(z, s);
3717    return Z_NULL;
3718  }
3719  s->end = s->window + w;
3720  s->checkfn = c;
3721  s->mode = TYPE;
3722  Trace((stderr, "inflate:   blocks allocated\n"));
3723  inflate_blocks_reset(s, z, &s->check);
3724  return s;
3725}
3726
3727
3728#ifdef DEBUG_ZLIB
3729  extern uInt inflate_hufts;
3730#endif
3731int inflate_blocks(s, z, r)
3732inflate_blocks_statef *s;
3733z_streamp z;
3734int r;
3735{
3736  uInt t;               /* temporary storage */
3737  uLong b;              /* bit buffer */
3738  uInt k;               /* bits in bit buffer */
3739  Bytef *p;             /* input data pointer */
3740  uInt n;               /* bytes available there */
3741  Bytef *q;             /* output window write pointer */
3742  uInt m;               /* bytes to end of window or read pointer */
3743
3744  /* copy input/output information to locals (UPDATE macro restores) */
3745  LOAD
3746
3747  /* process input based on current state */
3748  while (1) switch (s->mode)
3749  {
3750    case TYPE:
3751      NEEDBITS(3)
3752      t = (uInt)b & 7;
3753      s->last = t & 1;
3754      switch (t >> 1)
3755      {
3756        case 0:                         /* stored */
3757          Trace((stderr, "inflate:     stored block%s\n",
3758                 s->last ? " (last)" : ""));
3759          DUMPBITS(3)
3760          t = k & 7;                    /* go to byte boundary */
3761          DUMPBITS(t)
3762          s->mode = LENS;               /* get length of stored block */
3763          break;
3764        case 1:                         /* fixed */
3765          Trace((stderr, "inflate:     fixed codes block%s\n",
3766                 s->last ? " (last)" : ""));
3767          {
3768            uInt bl, bd;
3769            inflate_huft *tl, *td;
3770
3771            inflate_trees_fixed(&bl, &bd, &tl, &td);
3772            s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
3773            if (s->sub.decode.codes == Z_NULL)
3774            {
3775              r = Z_MEM_ERROR;
3776              LEAVE
3777            }
3778            s->sub.decode.tl = Z_NULL;  /* don't try to free these */
3779            s->sub.decode.td = Z_NULL;
3780          }
3781          DUMPBITS(3)
3782          s->mode = CODES;
3783          break;
3784        case 2:                         /* dynamic */
3785          Trace((stderr, "inflate:     dynamic codes block%s\n",
3786                 s->last ? " (last)" : ""));
3787          DUMPBITS(3)
3788          s->mode = TABLE;
3789          break;
3790        case 3:                         /* illegal */
3791          DUMPBITS(3)
3792          s->mode = BADB;
3793          z->msg = (char*)"invalid block type";
3794          r = Z_DATA_ERROR;
3795          LEAVE
3796      }
3797      break;
3798    case LENS:
3799      NEEDBITS(32)
3800      if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
3801      {
3802        s->mode = BADB;
3803        z->msg = (char*)"invalid stored block lengths";
3804        r = Z_DATA_ERROR;
3805        LEAVE
3806      }
3807      s->sub.left = (uInt)b & 0xffff;
3808      b = k = 0;                      /* dump bits */
3809      Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
3810      s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE);
3811      break;
3812    case STORED:
3813      if (n == 0)
3814        LEAVE
3815      NEEDOUT
3816      t = s->sub.left;
3817      if (t > n) t = n;
3818      if (t > m) t = m;
3819      zmemcpy(q, p, t);
3820      p += t;  n -= t;
3821      q += t;  m -= t;
3822      if ((s->sub.left -= t) != 0)
3823        break;
3824      Tracev((stderr, "inflate:       stored end, %lu total out\n",
3825              z->total_out + (q >= s->read ? q - s->read :
3826              (s->end - s->read) + (q - s->window))));
3827      s->mode = s->last ? DRY : TYPE;
3828      break;
3829    case TABLE:
3830      NEEDBITS(14)
3831      s->sub.trees.table = t = (uInt)b & 0x3fff;
3832#ifndef PKZIP_BUG_WORKAROUND
3833      if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
3834      {
3835        s->mode = BADB;
3836        z->msg = (char*)"too many length or distance symbols";
3837        r = Z_DATA_ERROR;
3838        LEAVE
3839      }
3840#endif
3841      t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
3842      if (t < 19)
3843        t = 19;
3844      if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
3845      {
3846        r = Z_MEM_ERROR;
3847        LEAVE
3848      }
3849      DUMPBITS(14)
3850      s->sub.trees.index = 0;
3851      Tracev((stderr, "inflate:       table sizes ok\n"));
3852      s->mode = BTREE;
3853    case BTREE:
3854      while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
3855      {
3856        NEEDBITS(3)
3857        s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
3858        DUMPBITS(3)
3859      }
3860      while (s->sub.trees.index < 19)
3861        s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
3862      s->sub.trees.bb = 7;
3863      t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
3864                             &s->sub.trees.tb, z);
3865      if (t != Z_OK)
3866      {
3867        ZFREE(z, s->sub.trees.blens);
3868        r = t;
3869        if (r == Z_DATA_ERROR)
3870          s->mode = BADB;
3871        LEAVE
3872      }
3873      s->sub.trees.index = 0;
3874      Tracev((stderr, "inflate:       bits tree ok\n"));
3875      s->mode = DTREE;
3876    case DTREE:
3877      while (t = s->sub.trees.table,
3878             s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
3879      {
3880        inflate_huft *h;
3881        uInt i, j, c;
3882
3883        t = s->sub.trees.bb;
3884        NEEDBITS(t)
3885        h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
3886        t = h->word.what.Bits;
3887        c = h->more.Base;
3888        if (c < 16)
3889        {
3890          DUMPBITS(t)
3891          s->sub.trees.blens[s->sub.trees.index++] = c;
3892        }
3893        else /* c == 16..18 */
3894        {
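          /* code 16 repeats the previous length 3-6 times, 17 emits 3-10
             zero lengths, 18 emits 11-138 zero lengths; i is the number of
             extra bits and j the base repeat count for the code */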
3895          i = c == 18 ? 7 : c - 14;
3896          j = c == 18 ? 11 : 3;
3897          NEEDBITS(t + i)
3898          DUMPBITS(t)
3899          j += (uInt)b & inflate_mask[i];
3900          DUMPBITS(i)
3901          i = s->sub.trees.index;
3902          t = s->sub.trees.table;
3903          if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
3904              (c == 16 && i < 1))
3905          {
3906            inflate_trees_free(s->sub.trees.tb, z);
3907            ZFREE(z, s->sub.trees.blens);
3908            s->mode = BADB;
3909            z->msg = (char*)"invalid bit length repeat";
3910            r = Z_DATA_ERROR;
3911            LEAVE
3912          }
3913          c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
3914          do {
3915            s->sub.trees.blens[i++] = c;
3916          } while (--j);
3917          s->sub.trees.index = i;
3918        }
3919      }
3920      inflate_trees_free(s->sub.trees.tb, z);
3921      s->sub.trees.tb = Z_NULL;
3922      {
3923        uInt bl, bd;
3924        inflate_huft *tl, *td;
3925        inflate_codes_statef *c;
3926
3927        bl = 9;         /* must be <= 9 for lookahead assumptions */
3928        bd = 6;         /* must be <= 9 for lookahead assumptions */
3929        t = s->sub.trees.table;
3930#ifdef DEBUG_ZLIB
3931        inflate_hufts = 0;
3932#endif
3933        t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
3934                                  s->sub.trees.blens, &bl, &bd, &tl, &td, z);
3935        ZFREE(z, s->sub.trees.blens);
3936        if (t != Z_OK)
3937        {
3938          if (t == (uInt)Z_DATA_ERROR)
3939            s->mode = BADB;
3940          r = t;
3941          LEAVE
3942        }
3943        Tracev((stderr, "inflate:       trees ok, %d * %d bytes used\n",
3944              inflate_hufts, sizeof(inflate_huft)));
3945        if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
3946        {
3947          inflate_trees_free(td, z);
3948          inflate_trees_free(tl, z);
3949          r = Z_MEM_ERROR;
3950          LEAVE
3951        }
3952        s->sub.decode.codes = c;
3953        s->sub.decode.tl = tl;
3954        s->sub.decode.td = td;
3955      }
3956      s->mode = CODES;
3957    case CODES:
3958      UPDATE
3959      if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
3960        return inflate_flush(s, z, r);
3961      r = Z_OK;
3962      inflate_codes_free(s->sub.decode.codes, z);
3963      inflate_trees_free(s->sub.decode.td, z);
3964      inflate_trees_free(s->sub.decode.tl, z);
3965      LOAD
3966      Tracev((stderr, "inflate:       codes end, %lu total out\n",
3967              z->total_out + (q >= s->read ? q - s->read :
3968              (s->end - s->read) + (q - s->window))));
3969      if (!s->last)
3970      {
3971        s->mode = TYPE;
3972        break;
3973      }
3974      if (k > 7)              /* return unused byte, if any */
3975      {
3976        Assert(k < 16, "inflate_codes grabbed too many bytes")
3977        k -= 8;
3978        n++;
3979        p--;                    /* can always return one */
3980      }
3981      s->mode = DRY;
3982    case DRY:
3983      FLUSH
3984      if (s->read != s->write)
3985        LEAVE
3986      s->mode = DONEB;
3987    case DONEB:
3988      r = Z_STREAM_END;
3989      LEAVE
3990    case BADB:
3991      r = Z_DATA_ERROR;
3992      LEAVE
3993    default:
3994      r = Z_STREAM_ERROR;
3995      LEAVE
3996  }
3997}
3998
3999
4000int inflate_blocks_free(s, z, c)
4001inflate_blocks_statef *s;
4002z_streamp z;
4003uLongf *c;
4004{
4005  inflate_blocks_reset(s, z, c);
4006  ZFREE(z, s->window);
4007  ZFREE(z, s);
4008  Trace((stderr, "inflate:   blocks freed\n"));
4009  return Z_OK;
4010}
4011
4012
4013void inflate_set_dictionary(s, d, n)
4014inflate_blocks_statef *s;
4015const Bytef *d;
4016uInt  n;
4017{
4018  zmemcpy((charf *)s->window, d, n);
4019  s->read = s->write = s->window + n;
4020}
4021
4022/*
4023 * This subroutine adds the data at next_in/avail_in to the output history
4024 * without performing any output.  The output buffer must be "caught up";
4025 * i.e. no pending output (hence s->read equals s->write), and the state must
4026 * be BLOCKS (i.e. we should be willing to see the start of a series of
4027 * BLOCKS).  On exit, the output will also be caught up, and the checksum
4028 * will have been updated if need be.
4029 */
4030int inflate_addhistory(s, z)
4031inflate_blocks_statef *s;
4032z_stream *z;
4033{
4034    uLong b;              /* bit buffer */  /* NOT USED HERE */
4035    uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
4036    uInt t;               /* temporary storage */
4037    Bytef *p;             /* input data pointer */
4038    uInt n;               /* bytes available there */
4039    Bytef *q;             /* output window write pointer */
4040    uInt m;               /* bytes to end of window or read pointer */
4041
4042    if (s->read != s->write)
4043	return Z_STREAM_ERROR;
4044    if (s->mode != TYPE)
4045	return Z_DATA_ERROR;
4046
4047    /* we're ready to rock */
4048    LOAD
4049    /* while there is input ready, copy to output buffer, moving
4050     * pointers as needed.
4051     */
4052    while (n) {
4053	t = n;  /* how many to do */
4054	/* is there room until end of buffer? */
4055	if (t > m) t = m;
4056	/* update check information */
4057	if (s->checkfn != Z_NULL)
4058	    s->check = (*s->checkfn)(s->check, q, t);
4059	zmemcpy(q, p, t);
4060	q += t;
4061	p += t;
4062	n -= t;
4063	z->total_out += t;
4064	s->read = q;    /* drag read pointer forward */
4065/*      WWRAP  */ 	/* expand WWRAP macro by hand to handle s->read */
4066	if (q == s->end) {
4067	    s->read = q = s->window;
4068	    m = WAVAIL;
4069	}
4070    }
4071    UPDATE
4072    return Z_OK;
4073}
4074
4075
4076/*
4077 * At the end of a Deflate-compressed PPP packet, we expect to have seen
4078 * a `stored' block type value but not the (zero) length bytes.
4079 */
4080int inflate_packet_flush(s)
4081    inflate_blocks_statef *s;
4082{
4083    if (s->mode != LENS)
4084	return Z_DATA_ERROR;
4085    s->mode = TYPE;
4086    return Z_OK;
4087}
4088/* --- infblock.c */
4089
4090/* +++ inftrees.c */
4091/* inftrees.c -- generate Huffman trees for efficient decoding
4092 * Copyright (C) 1995-1996 Mark Adler
4093 * For conditions of distribution and use, see copyright notice in zlib.h
4094 */
4095
4096/* #include "zutil.h" */
4097/* #include "inftrees.h" */
4098
4099char inflate_copyright[] = " inflate 1.0.4 Copyright 1995-1996 Mark Adler ";
4100/*
4101  If you use the zlib library in a product, an acknowledgment is welcome
4102  in the documentation of your product. If for some reason you cannot
4103  include such an acknowledgment, I would appreciate that you keep this
4104  copyright string in the executable of your product.
4105 */
4106
4107#ifndef NO_DUMMY_DECL
4108struct internal_state  {int dummy;}; /* for buggy compilers */
4109#endif
4110
4111/* simplify the use of the inflate_huft type with some defines */
4112#define base more.Base
4113#define next more.Next
4114#define exop word.what.Exop
4115#define bits word.what.Bits
4116
4117
4118local int huft_build OF((
4119    uIntf *,            /* code lengths in bits */
4120    uInt,               /* number of codes */
4121    uInt,               /* number of "simple" codes */
4122    const uIntf *,      /* list of base values for non-simple codes */
4123    const uIntf *,      /* list of extra bits for non-simple codes */
4124    inflate_huft * FAR*,/* result: starting table */
4125    uIntf *,            /* maximum lookup bits (returns actual) */
4126    z_streamp ));       /* for zalloc function */
4127
4128local voidpf falloc OF((
4129    voidpf,             /* opaque pointer (not used) */
4130    uInt,               /* number of items */
4131    uInt));             /* size of item */
4132
4133/* Tables for deflate from PKZIP's appnote.txt. */
4134local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
4135        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
4136        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
4137        /* see note #13 above about 258 */
4138local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
4139        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
4140        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
4141local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
4142        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
4143        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
4144        8193, 12289, 16385, 24577};
4145local const uInt cpdext[30] = { /* Extra bits for distance codes */
4146        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
4147        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
4148        12, 12, 13, 13};
4149
4150/*
4151   Huffman code decoding is performed using a multi-level table lookup.
4152   The fastest way to decode is to simply build a lookup table whose
4153   size is determined by the longest code.  However, the time it takes
4154   to build this table can also be a factor if the data being decoded
4155   is not very long.  The most common codes are necessarily the
4156   shortest codes, so those codes dominate the decoding time, and hence
4157   the speed.  The idea is that you can have a shorter table that decodes the
4158   shorter, more probable codes, and then point to subsidiary tables for
4159   the longer codes.  The time it costs to decode the longer codes is
4160   then traded against the time it takes to make longer tables.
4161
4162   The results of this trade-off are in the variables lbits and dbits
4163   below.  lbits is the number of bits the first level table for literal/
4164   length codes can decode in one step, and dbits is the same thing for
4165   the distance codes.  Subsequent tables are also less than or equal to
4166   those sizes.  These values may be adjusted either when all of the
4167   codes are shorter than that, in which case the longest code length in
4168   bits is used, or when the shortest code is *longer* than the requested
4169   table size, in which case the length of the shortest code in bits is
4170   used.
4171
4172   There are two different values for the two tables, since they code a
4173   different number of possibilities each.  The literal/length table
4174   codes 286 possible values, or in a flat code, a little over eight
4175   bits.  The distance table codes 30 possible values, or a little less
4176   than five bits, flat.  The optimum values for speed end up being
4177   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
4178   The optimum values may differ though from machine to machine, and
4179   possibly even between compilers.  Your mileage may vary.
4180 */
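/*
   Roughly, with lbits == 9 a literal/length code of nine bits or fewer is
   resolved with a single first-level lookup, while a longer code (say 13
   bits) costs one additional lookup in a small subsidiary table indexed by
   the remaining bits.
 */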
4181
4182
4183/* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
4184#define BMAX 15         /* maximum bit length of any code */
4185#define N_MAX 288       /* maximum number of codes in any set */
4186
4187#ifdef DEBUG_ZLIB
4188  uInt inflate_hufts;
4189#endif
4190
4191local int huft_build(b, n, s, d, e, t, m, zs)
4192uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
4193uInt n;                 /* number of codes (assumed <= N_MAX) */
4194uInt s;                 /* number of simple-valued codes (0..s-1) */
4195const uIntf *d;         /* list of base values for non-simple codes */
4196const uIntf *e;         /* list of extra bits for non-simple codes */
4197inflate_huft * FAR *t;  /* result: starting table */
4198uIntf *m;               /* maximum lookup bits, returns actual */
4199z_streamp zs;           /* for zalloc function */
4200/* Given a list of code lengths and a maximum table size, make a set of
4201   tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
4202   if the given code set is incomplete (the tables are still built in this
4203   case), Z_DATA_ERROR if the input is invalid (an over-subscribed set of
4204   lengths), or Z_MEM_ERROR if not enough memory. */
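/* The construction is the usual canonical-Huffman procedure: count the codes
   of each bit length, check that those counts do not over-subscribe the code,
   sort the symbols by code length, and then step through the codes in
   increasing order, filling in table entries and creating subsidiary tables
   whenever a code is longer than the bits covered by the current table. */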
4205{
4206
4207  uInt a;                       /* counter for codes of length k */
4208  uInt c[BMAX+1];               /* bit length count table */
4209  uInt f;                       /* i repeats in table every f entries */
4210  int g;                        /* maximum code length */
4211  int h;                        /* table level */
4212  register uInt i;              /* counter, current code */
4213  register uInt j;              /* counter */
4214  register int k;               /* number of bits in current code */
4215  int l;                        /* bits per table (returned in m) */
4216  register uIntf *p;            /* pointer into c[], b[], or v[] */
4217  inflate_huft *q;              /* points to current table */
4218  struct inflate_huft_s r;      /* table entry for structure assignment */
4219  inflate_huft *u[BMAX];        /* table stack */
4220  uInt v[N_MAX];                /* values in order of bit length */
4221  register int w;               /* bits before this table == (l * h) */
4222  uInt x[BMAX+1];               /* bit offsets, then code stack */
4223  uIntf *xp;                    /* pointer into x */
4224  int y;                        /* number of dummy codes added */
4225  uInt z;                       /* number of entries in current table */
4226
4227
4228  /* Generate counts for each bit length */
4229  p = c;
4230#define C0 *p++ = 0;
4231#define C2 C0 C0 C0 C0
4232#define C4 C2 C2 C2 C2
4233  C4                            /* clear c[]--assume BMAX+1 is 16 */
4234  p = b;  i = n;
4235  do {
4236    c[*p++]++;                  /* assume all entries <= BMAX */
4237  } while (--i);
4238  if (c[0] == n)                /* null input--all zero length codes */
4239  {
4240    *t = (inflate_huft *)Z_NULL;
4241    *m = 0;
4242    return Z_OK;
4243  }
4244
4245
4246  /* Find minimum and maximum length, bound *m by those */
4247  l = *m;
4248  for (j = 1; j <= BMAX; j++)
4249    if (c[j])
4250      break;
4251  k = j;                        /* minimum code length */
4252  if ((uInt)l < j)
4253    l = j;
4254  for (i = BMAX; i; i--)
4255    if (c[i])
4256      break;
4257  g = i;                        /* maximum code length */
4258  if ((uInt)l > i)
4259    l = i;
4260  *m = l;
4261
4262
4263  /* Adjust last length count to fill out codes, if needed */
4264  for (y = 1 << j; j < i; j++, y <<= 1)
4265    if ((y -= c[j]) < 0)
4266      return Z_DATA_ERROR;
4267  if ((y -= c[i]) < 0)
4268    return Z_DATA_ERROR;
4269  c[i] += y;
4270
4271
4272  /* Generate starting offsets into the value table for each length */
4273  x[1] = j = 0;
4274  p = c + 1;  xp = x + 2;
4275  while (--i) {                 /* note that i == g from above */
4276    *xp++ = (j += *p++);
4277  }
4278
4279
4280  /* Make a table of values in order of bit lengths */
4281  p = b;  i = 0;
4282  do {
4283    if ((j = *p++) != 0)
4284      v[x[j]++] = i;
4285  } while (++i < n);
4286  n = x[g];                   /* set n to length of v */
4287
4288
4289  /* Generate the Huffman codes and for each, make the table entries */
4290  x[0] = i = 0;                 /* first Huffman code is zero */
4291  p = v;                        /* grab values in bit order */
4292  h = -1;                       /* no tables yet--level -1 */
4293  w = -l;                       /* bits decoded == (l * h) */
4294  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
4295  q = (inflate_huft *)Z_NULL;   /* ditto */
4296  z = 0;                        /* ditto */
4297
4298  /* go through the bit lengths (k already is bits in shortest code) */
4299  for (; k <= g; k++)
4300  {
4301    a = c[k];
4302    while (a--)
4303    {
4304      /* here i is the Huffman code of length k bits for value *p */
4305      /* make tables up to required level */
4306      while (k > w + l)
4307      {
4308        h++;
4309        w += l;                 /* previous table always l bits */
4310
4311        /* compute minimum size table less than or equal to l bits */
4312        z = g - w;
4313        z = z > (uInt)l ? l : z;        /* table size upper limit */
4314        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
4315        {                       /* too few codes for k-w bit table */
4316          f -= a + 1;           /* deduct codes from patterns left */
4317          xp = c + k;
4318          if (j < z)
4319            while (++j < z)     /* try smaller tables up to z bits */
4320            {
4321              if ((f <<= 1) <= *++xp)
4322                break;          /* enough codes to use up j bits */
4323              f -= *xp;         /* else deduct codes from patterns */
4324            }
4325        }
4326        z = 1 << j;             /* table entries for j-bit table */
4327
4328        /* allocate and link in new table */
4329        if ((q = (inflate_huft *)ZALLOC
4330             (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
4331        {
4332          if (h)
4333            inflate_trees_free(u[0], zs);
4334          return Z_MEM_ERROR;   /* not enough memory */
4335        }
4336#ifdef DEBUG_ZLIB
4337        inflate_hufts += z + 1;
4338#endif
4339        *t = q + 1;             /* link to list for huft_free() */
4340        *(t = &(q->next)) = Z_NULL;
4341        u[h] = ++q;             /* table starts after link */
4342
4343        /* connect to last table, if there is one */
4344        if (h)
4345        {
4346          x[h] = i;             /* save pattern for backing up */
4347          r.bits = (Byte)l;     /* bits to dump before this table */
4348          r.exop = (Byte)j;     /* bits in this table */
4349          r.next = q;           /* pointer to this table */
4350          j = i >> (w - l);     /* (get around Turbo C bug) */
4351          u[h-1][j] = r;        /* connect to last table */
4352        }
4353      }
4354
4355      /* set up table entry in r */
4356      r.bits = (Byte)(k - w);
4357      if (p >= v + n)
4358        r.exop = 128 + 64;      /* out of values--invalid code */
4359      else if (*p < s)
4360      {
4361        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
4362        r.base = *p++;          /* simple code is just the value */
4363      }
4364      else
4365      {
4366        r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */
4367        r.base = d[*p++ - s];
4368      }
4369
4370      /* fill code-like entries with r */
4371      f = 1 << (k - w);
4372      for (j = i >> w; j < z; j += f)
4373        q[j] = r;
4374
4375      /* backwards increment the k-bit code i */
4376      for (j = 1 << (k - 1); i & j; j >>= 1)
4377        i ^= j;
4378      i ^= j;
4379
4380      /* backup over finished tables */
4381      while ((i & ((1 << w) - 1)) != x[h])
4382      {
4383        h--;                    /* don't need to update q */
4384        w -= l;
4385      }
4386    }
4387  }
4388
4389
4390  /* Return Z_BUF_ERROR if we were given an incomplete table */
4391  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
4392}
4393
4394
4395int inflate_trees_bits(c, bb, tb, z)
4396uIntf *c;               /* 19 code lengths */
4397uIntf *bb;              /* bits tree desired/actual depth */
4398inflate_huft * FAR *tb; /* bits tree result */
4399z_streamp z;            /* for zfree function */
4400{
4401  int r;
4402
4403  r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
4404  if (r == Z_DATA_ERROR)
4405    z->msg = (char*)"oversubscribed dynamic bit lengths tree";
4406  else if (r == Z_BUF_ERROR || *bb == 0)
4407  {
4408    inflate_trees_free(*tb, z);
4409    z->msg = (char*)"incomplete dynamic bit lengths tree";
4410    r = Z_DATA_ERROR;
4411  }
4412  return r;
4413}
4414
4415
4416int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
4417uInt nl;                /* number of literal/length codes */
4418uInt nd;                /* number of distance codes */
4419uIntf *c;               /* that many (total) code lengths */
4420uIntf *bl;              /* literal desired/actual bit depth */
4421uIntf *bd;              /* distance desired/actual bit depth */
4422inflate_huft * FAR *tl; /* literal/length tree result */
4423inflate_huft * FAR *td; /* distance tree result */
4424z_streamp z;            /* for zfree function */
4425{
4426  int r;
4427
4428  /* build literal/length tree */
4429  r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z);
4430  if (r != Z_OK || *bl == 0)
4431  {
4432    if (r == Z_DATA_ERROR)
4433      z->msg = (char*)"oversubscribed literal/length tree";
4434    else if (r != Z_MEM_ERROR)
4435    {
4436      inflate_trees_free(*tl, z);
4437      z->msg = (char*)"incomplete literal/length tree";
4438      r = Z_DATA_ERROR;
4439    }
4440    return r;
4441  }
4442
4443  /* build distance tree */
4444  r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z);
4445  if (r != Z_OK || (*bd == 0 && nl > 257))
4446  {
4447    if (r == Z_DATA_ERROR)
4448      z->msg = (char*)"oversubscribed distance tree";
4449    else if (r == Z_BUF_ERROR) {
4450#ifdef PKZIP_BUG_WORKAROUND
4451      r = Z_OK;
4452    }
4453#else
4454      inflate_trees_free(*td, z);
4455      z->msg = (char*)"incomplete distance tree";
4456      r = Z_DATA_ERROR;
4457    }
4458    else if (r != Z_MEM_ERROR)
4459    {
4460      z->msg = (char*)"empty distance tree with lengths";
4461      r = Z_DATA_ERROR;
4462    }
4463    inflate_trees_free(*tl, z);
4464    return r;
4465#endif
4466  }
4467
4468  /* done */
4469  return Z_OK;
4470}
4471
4472
4473/* build fixed tables only once--keep them here */
4474local int fixed_built = 0;
4475#define FIXEDH 530      /* number of hufts used by fixed tables */
4476local inflate_huft fixed_mem[FIXEDH];
4477local uInt fixed_bl;
4478local uInt fixed_bd;
4479local inflate_huft *fixed_tl;
4480local inflate_huft *fixed_td;
4481
4482
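/* falloc hands out inflate_huft entries from the static fixed_mem[] array
   instead of calling a real allocator; the opaque pointer counts how many
   entries are still unused. */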
4483local voidpf falloc(q, n, s)
4484voidpf q;       /* opaque pointer */
4485uInt n;         /* number of items */
4486uInt s;         /* size of item */
4487{
4488  Assert(s == sizeof(inflate_huft) && n <= *(intf *)q,
4489         "inflate_trees falloc overflow");
4490  *(intf *)q -= n+s-s; /* s-s to avoid warning */
4491  return (voidpf)(fixed_mem + *(intf *)q);
4492}
4493
4494
4495int inflate_trees_fixed(bl, bd, tl, td)
4496uIntf *bl;               /* literal desired/actual bit depth */
4497uIntf *bd;               /* distance desired/actual bit depth */
4498inflate_huft * FAR *tl;  /* literal/length tree result */
4499inflate_huft * FAR *td;  /* distance tree result */
4500{
4501  /* build fixed tables if not already (multiple overlapped executions ok) */
4502  if (!fixed_built)
4503  {
4504    int k;              /* temporary variable */
4505    unsigned c[288];    /* length list for huft_build */
4506    z_stream z;         /* for falloc function */
4507    int f = FIXEDH;     /* number of hufts left in fixed_mem */
4508
4509    /* set up fake z_stream for memory routines */
4510    z.zalloc = falloc;
4511    z.zfree = Z_NULL;
4512    z.opaque = (voidpf)&f;
4513
4514    /* literal table */
4515    for (k = 0; k < 144; k++)
4516      c[k] = 8;
4517    for (; k < 256; k++)
4518      c[k] = 9;
4519    for (; k < 280; k++)
4520      c[k] = 7;
4521    for (; k < 288; k++)
4522      c[k] = 8;
4523    fixed_bl = 7;
4524    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);
4525
4526    /* distance table */
4527    for (k = 0; k < 30; k++)
4528      c[k] = 5;
4529    fixed_bd = 5;
4530    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);
4531
4532    /* done */
4533    Assert(f == 0, "invalid build of fixed tables");
4534    fixed_built = 1;
4535  }
4536  *bl = fixed_bl;
4537  *bd = fixed_bd;
4538  *tl = fixed_tl;
4539  *td = fixed_td;
4540  return Z_OK;
4541}
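/* The code lengths installed above are the fixed Huffman code from the
 * deflate format: literals 0-143 get 8 bits, 144-255 get 9 bits, the
 * end-of-block and length codes 256-279 get 7 bits, 280-287 get 8 bits,
 * and all 30 distance codes get 5 bits.  With those lengths the returned
 * depths should come back as *bl == 7 and *bd == 5.
 */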
4542
4543
4544int inflate_trees_free(t, z)
4545inflate_huft *t;        /* table to free */
4546z_streamp z;            /* for zfree function */
4547/* Free the malloc'ed tables built by huft_build(), which makes a linked
4548   list of the tables it made, with the links in a dummy first entry of
4549   each table. */
4550{
4551  register inflate_huft *p, *q, *r;
4552
4553  /* Reverse linked list */
4554  p = Z_NULL;
4555  q = t;
4556  while (q != Z_NULL)
4557  {
4558    r = (q - 1)->next;
4559    (q - 1)->next = p;
4560    p = q;
4561    q = r;
4562  }
4563  /* Go through linked list, freeing from the malloced (t[-1]) address. */
4564  while (p != Z_NULL)
4565  {
4566    q = (--p)->next;
4567    ZFREE(z,p);
4568    p = q;
4569  }
4570  return Z_OK;
4571}
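/* Layout assumed by the loops above (a sketch): huft_build() allocates each
 * sub-table with one dummy inflate_huft in front of it and chains the tables
 * together through that dummy entry's `next' field, so walking (q - 1)->next
 * visits every allocation and ZFREE() is applied to the dummy's address,
 * i.e. the address ZALLOC() originally returned.
 */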
4572/* --- inftrees.c */
4573
4574/* +++ infcodes.c */
4575/* infcodes.c -- process literals and length/distance pairs
4576 * Copyright (C) 1995-1996 Mark Adler
4577 * For conditions of distribution and use, see copyright notice in zlib.h
4578 */
4579
4580/* #include "zutil.h" */
4581/* #include "inftrees.h" */
4582/* #include "infblock.h" */
4583/* #include "infcodes.h" */
4584/* #include "infutil.h" */
4585
4586/* +++ inffast.h */
4587/* inffast.h -- header to use inffast.c
4588 * Copyright (C) 1995-1996 Mark Adler
4589 * For conditions of distribution and use, see copyright notice in zlib.h
4590 */
4591
4592/* WARNING: this file should *not* be used by applications. It is
4593   part of the implementation of the compression library and is
4594   subject to change. Applications should only use zlib.h.
4595 */
4596
4597extern int inflate_fast OF((
4598    uInt,
4599    uInt,
4600    inflate_huft *,
4601    inflate_huft *,
4602    inflate_blocks_statef *,
4603    z_streamp ));
4604/* --- inffast.h */
4605
4606/* simplify the use of the inflate_huft type with some defines */
4607#define base more.Base
4608#define next more.Next
4609#define exop word.what.Exop
4610#define bits word.what.Bits
4611
4612/* inflate codes private state */
4613struct inflate_codes_state {
4614
4615  /* mode */
4616  enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4617      START,    /* x: set up for LEN */
4618      LEN,      /* i: get length/literal/eob next */
4619      LENEXT,   /* i: getting length extra (have base) */
4620      DIST,     /* i: get distance next */
4621      DISTEXT,  /* i: getting distance extra */
4622      COPY,     /* o: copying bytes in window, waiting for space */
4623      LIT,      /* o: got literal, waiting for output space */
4624      WASH,     /* o: got eob, possibly still output waiting */
4625      END,      /* x: got eob and all data flushed */
4626      BADCODE}  /* x: got error */
4627    mode;               /* current inflate_codes mode */
4628
4629  /* mode dependent information */
4630  uInt len;
4631  union {
4632    struct {
4633      inflate_huft *tree;       /* pointer into tree */
4634      uInt need;                /* bits needed */
4635    } code;             /* if LEN or DIST, where in tree */
4636    uInt lit;           /* if LIT, literal */
4637    struct {
4638      uInt get;                 /* bits to get for extra */
4639      uInt dist;                /* distance back to copy from */
4640    } copy;             /* if EXT or COPY, where and how much */
4641  } sub;                /* submode */
4642
4643  /* mode independent information */
4644  Byte lbits;           /* ltree bits decoded per branch */
4645  Byte dbits;           /* dtree bits decoded per branch */
4646  inflate_huft *ltree;          /* literal/length/eob tree */
4647  inflate_huft *dtree;          /* distance tree */
4648
4649};
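/* Sketch of the usual mode transitions, as implemented by inflate_codes()
 * below (either decode state may also drop to BADCODE on an invalid code):
 *
 *   START -> LEN
 *   LEN   -> LIT               (literal code)
 *         -> LENEXT -> DIST    (length code, then its extra bits)
 *         -> WASH   -> END     (end-of-block code, then final flush)
 *   DIST  -> DISTEXT -> COPY   (distance code, extra bits, then the copy)
 *   COPY  -> START             (match copied into the window)
 *   LIT   -> START             (literal written to the window)
 */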
4650
4651
4652inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
4653uInt bl, bd;
4654inflate_huft *tl;
4655inflate_huft *td; /* need separate declaration for Borland C++ */
4656z_streamp z;
4657{
4658  inflate_codes_statef *c;
4659
4660  if ((c = (inflate_codes_statef *)
4661       ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
4662  {
4663    c->mode = START;
4664    c->lbits = (Byte)bl;
4665    c->dbits = (Byte)bd;
4666    c->ltree = tl;
4667    c->dtree = td;
4668    Tracev((stderr, "inflate:       codes new\n"));
4669  }
4670  return c;
4671}
4672
4673
4674int inflate_codes(s, z, r)
4675inflate_blocks_statef *s;
4676z_streamp z;
4677int r;
4678{
4679  uInt j;               /* temporary storage */
4680  inflate_huft *t;      /* temporary pointer */
4681  uInt e;               /* extra bits or operation */
4682  uLong b;              /* bit buffer */
4683  uInt k;               /* bits in bit buffer */
4684  Bytef *p;             /* input data pointer */
4685  uInt n;               /* bytes available there */
4686  Bytef *q;             /* output window write pointer */
4687  uInt m;               /* bytes to end of window or read pointer */
4688  Bytef *f;             /* pointer to copy strings from */
4689  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */
4690
4691  /* copy input/output information to locals (UPDATE macro restores) */
4692  LOAD
4693
4694  /* process input and output based on current state */
4695  while (1) switch (c->mode)
4696  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4697    case START:         /* x: set up for LEN */
4698#ifndef SLOW
4699      if (m >= 258 && n >= 10)
4700      {
4701        UPDATE
4702        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
4703        LOAD
4704        if (r != Z_OK)
4705        {
4706          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
4707          break;
4708        }
4709      }
4710#endif /* !SLOW */
4711      c->sub.code.need = c->lbits;
4712      c->sub.code.tree = c->ltree;
4713      c->mode = LEN;
4714    case LEN:           /* i: get length/literal/eob next */
4715      j = c->sub.code.need;
4716      NEEDBITS(j)
4717      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4718      DUMPBITS(t->bits)
4719      e = (uInt)(t->exop);
4720      if (e == 0)               /* literal */
4721      {
4722        c->sub.lit = t->base;
4723        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4724                 "inflate:         literal '%c'\n" :
4725                 "inflate:         literal 0x%02x\n", t->base));
4726        c->mode = LIT;
4727        break;
4728      }
4729      if (e & 16)               /* length */
4730      {
4731        c->sub.copy.get = e & 15;
4732        c->len = t->base;
4733        c->mode = LENEXT;
4734        break;
4735      }
4736      if ((e & 64) == 0)        /* next table */
4737      {
4738        c->sub.code.need = e;
4739        c->sub.code.tree = t->next;
4740        break;
4741      }
4742      if (e & 32)               /* end of block */
4743      {
4744        Tracevv((stderr, "inflate:         end of block\n"));
4745        c->mode = WASH;
4746        break;
4747      }
4748      c->mode = BADCODE;        /* invalid code */
4749      z->msg = (char*)"invalid literal/length code";
4750      r = Z_DATA_ERROR;
4751      LEAVE
4752    case LENEXT:        /* i: getting length extra (have base) */
4753      j = c->sub.copy.get;
4754      NEEDBITS(j)
4755      c->len += (uInt)b & inflate_mask[j];
4756      DUMPBITS(j)
4757      c->sub.code.need = c->dbits;
4758      c->sub.code.tree = c->dtree;
4759      Tracevv((stderr, "inflate:         length %u\n", c->len));
4760      c->mode = DIST;
4761    case DIST:          /* i: get distance next */
4762      j = c->sub.code.need;
4763      NEEDBITS(j)
4764      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4765      DUMPBITS(t->bits)
4766      e = (uInt)(t->exop);
4767      if (e & 16)               /* distance */
4768      {
4769        c->sub.copy.get = e & 15;
4770        c->sub.copy.dist = t->base;
4771        c->mode = DISTEXT;
4772        break;
4773      }
4774      if ((e & 64) == 0)        /* next table */
4775      {
4776        c->sub.code.need = e;
4777        c->sub.code.tree = t->next;
4778        break;
4779      }
4780      c->mode = BADCODE;        /* invalid code */
4781      z->msg = (char*)"invalid distance code";
4782      r = Z_DATA_ERROR;
4783      LEAVE
4784    case DISTEXT:       /* i: getting distance extra */
4785      j = c->sub.copy.get;
4786      NEEDBITS(j)
4787      c->sub.copy.dist += (uInt)b & inflate_mask[j];
4788      DUMPBITS(j)
4789      Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
4790      c->mode = COPY;
4791    case COPY:          /* o: copying bytes in window, waiting for space */
4792#ifndef __TURBOC__ /* Turbo C bug for following expression */
4793      f = (uInt)(q - s->window) < c->sub.copy.dist ?
4794          s->end - (c->sub.copy.dist - (q - s->window)) :
4795          q - c->sub.copy.dist;
4796#else
4797      f = q - c->sub.copy.dist;
4798      if ((uInt)(q - s->window) < c->sub.copy.dist)
4799        f = s->end - (c->sub.copy.dist - (uInt)(q - s->window));
4800#endif
4801      while (c->len)
4802      {
4803        NEEDOUT
4804        OUTBYTE(*f++)
4805        if (f == s->end)
4806          f = s->window;
4807        c->len--;
4808      }
4809      c->mode = START;
4810      break;
4811    case LIT:           /* o: got literal, waiting for output space */
4812      NEEDOUT
4813      OUTBYTE(c->sub.lit)
4814      c->mode = START;
4815      break;
4816    case WASH:          /* o: got eob, possibly more output */
4817      FLUSH
4818      if (s->read != s->write)
4819        LEAVE
4820      c->mode = END;
4821    case END:
4822      r = Z_STREAM_END;
4823      LEAVE
4824    case BADCODE:       /* x: got error */
4825      r = Z_DATA_ERROR;
4826      LEAVE
4827    default:
4828      r = Z_STREAM_ERROR;
4829      LEAVE
4830  }
4831}
4832
4833
4834void inflate_codes_free(c, z)
4835inflate_codes_statef *c;
4836z_streamp z;
4837{
4838  ZFREE(z, c);
4839  Tracev((stderr, "inflate:       codes free\n"));
4840}
4841/* --- infcodes.c */
4842
4843/* +++ infutil.c */
4844/* inflate_util.c -- data and routines common to blocks and codes
4845 * Copyright (C) 1995-1996 Mark Adler
4846 * For conditions of distribution and use, see copyright notice in zlib.h
4847 */
4848
4849/* #include "zutil.h" */
4850/* #include "infblock.h" */
4851/* #include "inftrees.h" */
4852/* #include "infcodes.h" */
4853/* #include "infutil.h" */
4854
4855#ifndef NO_DUMMY_DECL
4856struct inflate_codes_state {int dummy;}; /* for buggy compilers */
4857#endif
4858
4859/* And'ing with mask[n] masks the lower n bits */
4860uInt inflate_mask[17] = {
4861    0x0000,
4862    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
4863    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
4864};
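/* For example, inflate_mask[5] == 0x001f, so ((uInt)b & inflate_mask[5])
 * extracts the low five bits of the bit buffer -- the form used by the
 * decoders in infcodes.c and inffast.c. */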
4865
4866
4867/* copy as much as possible from the sliding window to the output area */
4868int inflate_flush(s, z, r)
4869inflate_blocks_statef *s;
4870z_streamp z;
4871int r;
4872{
4873  uInt n;
4874  Bytef *p;
4875  Bytef *q;
4876
4877  /* local copies of source and destination pointers */
4878  p = z->next_out;
4879  q = s->read;
4880
4881  /* compute number of bytes to copy as far as end of window */
4882  n = (uInt)((q <= s->write ? s->write : s->end) - q);
4883  if (n > z->avail_out) n = z->avail_out;
4884  if (n && r == Z_BUF_ERROR) r = Z_OK;
4885
4886  /* update counters */
4887  z->avail_out -= n;
4888  z->total_out += n;
4889
4890  /* update check information */
4891  if (s->checkfn != Z_NULL)
4892    z->adler = s->check = (*s->checkfn)(s->check, q, n);
4893
4894  /* copy as far as end of window */
4895  if (p != Z_NULL) {
4896    zmemcpy(p, q, n);
4897    p += n;
4898  }
4899  q += n;
4900
4901  /* see if more to copy at beginning of window */
4902  if (q == s->end)
4903  {
4904    /* wrap pointers */
4905    q = s->window;
4906    if (s->write == s->end)
4907      s->write = s->window;
4908
4909    /* compute bytes to copy */
4910    n = (uInt)(s->write - q);
4911    if (n > z->avail_out) n = z->avail_out;
4912    if (n && r == Z_BUF_ERROR) r = Z_OK;
4913
4914    /* update counters */
4915    z->avail_out -= n;
4916    z->total_out += n;
4917
4918    /* update check information */
4919    if (s->checkfn != Z_NULL)
4920      z->adler = s->check = (*s->checkfn)(s->check, q, n);
4921
4922    /* copy */
4923    if (p != Z_NULL) {
4924      zmemcpy(p, q, n);
4925      p += n;
4926    }
4927    q += n;
4928  }
4929
4930  /* update pointers */
4931  z->next_out = p;
4932  s->read = q;
4933
4934  /* done */
4935  return r;
4936}
4937/* --- infutil.c */
4938
4939/* +++ inffast.c */
4940/* inffast.c -- process literals and length/distance pairs fast
4941 * Copyright (C) 1995-1996 Mark Adler
4942 * For conditions of distribution and use, see copyright notice in zlib.h
4943 */
4944
4945/* #include "zutil.h" */
4946/* #include "inftrees.h" */
4947/* #include "infblock.h" */
4948/* #include "infcodes.h" */
4949/* #include "infutil.h" */
4950/* #include "inffast.h" */
4951
4952#ifndef NO_DUMMY_DECL
4953struct inflate_codes_state {int dummy;}; /* for buggy compilers */
4954#endif
4955
4956/* simplify the use of the inflate_huft type with some defines */
4957#define base more.Base
4958#define next more.Next
4959#define exop word.what.Exop
4960#define bits word.what.Bits
4961
4962/* macros for bit input with no checking and for returning unused bytes */
4963#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
4964#define UNGRAB {n+=(c=k>>3);p-=c;k&=7;}
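/* GRABBITS(j) pulls whole bytes from the input into the bit buffer b until it
 * holds at least j bits, with no end-of-input check; UNGRAB hands back the
 * c = k>>3 whole bytes that were pulled in but never consumed (n += c,
 * p -= c) and keeps only the k &= 7 leftover bits. */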
4965
4966/* Called with number of bytes left to write in window at least 258
4967   (the maximum string length) and number of input bytes available
4968   at least ten.  The ten bytes are six bytes for the longest length/
4969   distance pair plus four bytes for overloading the bit buffer. */
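/* A sketch of that arithmetic, assuming the deflate limits of 15-bit Huffman
   codes, 5 length extra bits and 13 distance extra bits:
       15 + 5 + 15 + 13 == 48 bits == 6 bytes
   for one complete length/distance pair; the remaining four bytes cover what
   GRABBITS may read ahead into the bit buffer, which UNGRAB returns to the
   input as whole unused bytes before leaving. */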
4970
4971int inflate_fast(bl, bd, tl, td, s, z)
4972uInt bl, bd;
4973inflate_huft *tl;
4974inflate_huft *td; /* need separate declaration for Borland C++ */
4975inflate_blocks_statef *s;
4976z_streamp z;
4977{
4978  inflate_huft *t;      /* temporary pointer */
4979  uInt e;               /* extra bits or operation */
4980  uLong b;              /* bit buffer */
4981  uInt k;               /* bits in bit buffer */
4982  Bytef *p;             /* input data pointer */
4983  uInt n;               /* bytes available there */
4984  Bytef *q;             /* output window write pointer */
4985  uInt m;               /* bytes to end of window or read pointer */
4986  uInt ml;              /* mask for literal/length tree */
4987  uInt md;              /* mask for distance tree */
4988  uInt c;               /* bytes to copy */
4989  uInt d;               /* distance back to copy from */
4990  Bytef *r;             /* copy source pointer */
4991
4992  /* load input, output, bit values */
4993  LOAD
4994
4995  /* initialize masks */
4996  ml = inflate_mask[bl];
4997  md = inflate_mask[bd];
4998
4999  /* do until not enough input or output space for fast loop */
5000  do {                          /* assume called with m >= 258 && n >= 10 */
5001    /* get literal/length code */
5002    GRABBITS(20)                /* max bits for literal/length code */
5003    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
5004    {
5005      DUMPBITS(t->bits)
5006      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
5007                "inflate:         * literal '%c'\n" :
5008                "inflate:         * literal 0x%02x\n", t->base));
5009      *q++ = (Byte)t->base;
5010      m--;
5011      continue;
5012    }
5013    do {
5014      DUMPBITS(t->bits)
5015      if (e & 16)
5016      {
5017        /* get extra bits for length */
5018        e &= 15;
5019        c = t->base + ((uInt)b & inflate_mask[e]);
5020        DUMPBITS(e)
5021        Tracevv((stderr, "inflate:         * length %u\n", c));
5022
5023        /* decode distance base of block to copy */
5024        GRABBITS(15);           /* max bits for distance code */
5025        e = (t = td + ((uInt)b & md))->exop;
5026        do {
5027          DUMPBITS(t->bits)
5028          if (e & 16)
5029          {
5030            /* get extra bits to add to distance base */
5031            e &= 15;
5032            GRABBITS(e)         /* get extra bits (up to 13) */
5033            d = t->base + ((uInt)b & inflate_mask[e]);
5034            DUMPBITS(e)
5035            Tracevv((stderr, "inflate:         * distance %u\n", d));
5036
5037            /* do the copy */
5038            m -= c;
5039            if ((uInt)(q - s->window) >= d)     /* offset before dest */
5040            {                                   /*  just copy */
5041              r = q - d;
5042              *q++ = *r++;  c--;        /* minimum count is three, */
5043              *q++ = *r++;  c--;        /*  so unroll loop a little */
5044            }
5045            else                        /* else offset after destination */
5046            {
5047              e = d - (uInt)(q - s->window); /* bytes from offset to end */
5048              r = s->end - e;           /* pointer to offset */
5049              if (c > e)                /* if source crosses, */
5050              {
5051                c -= e;                 /* copy to end of window */
5052                do {
5053                  *q++ = *r++;
5054                } while (--e);
5055                r = s->window;          /* copy rest from start of window */
5056              }
5057            }
5058            do {                        /* copy all or what's left */
5059              *q++ = *r++;
5060            } while (--c);
5061            break;
5062          }
5063          else if ((e & 64) == 0)
5064            e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;
5065          else
5066          {
5067            z->msg = (char*)"invalid distance code";
5068            UNGRAB
5069            UPDATE
5070            return Z_DATA_ERROR;
5071          }
5072        } while (1);
5073        break;
5074      }
5075      if ((e & 64) == 0)
5076      {
5077        if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)
5078        {
5079          DUMPBITS(t->bits)
5080          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
5081                    "inflate:         * literal '%c'\n" :
5082                    "inflate:         * literal 0x%02x\n", t->base));
5083          *q++ = (Byte)t->base;
5084          m--;
5085          break;
5086        }
5087      }
5088      else if (e & 32)
5089      {
5090        Tracevv((stderr, "inflate:         * end of block\n"));
5091        UNGRAB
5092        UPDATE
5093        return Z_STREAM_END;
5094      }
5095      else
5096      {
5097        z->msg = (char*)"invalid literal/length code";
5098        UNGRAB
5099        UPDATE
5100        return Z_DATA_ERROR;
5101      }
5102    } while (1);
5103  } while (m >= 258 && n >= 10);
5104
5105  /* not enough input or output--restore pointers and return */
5106  UNGRAB
5107  UPDATE
5108  return Z_OK;
5109}
5110/* --- inffast.c */
5111
5112/* +++ zutil.c */
5113/* zutil.c -- target dependent utility functions for the compression library
5114 * Copyright (C) 1995-1996 Jean-loup Gailly.
5115 * For conditions of distribution and use, see copyright notice in zlib.h
5116 */
5117
5118/* From: zutil.c,v 1.17 1996/07/24 13:41:12 me Exp $ */
5119
5120#ifdef DEBUG_ZLIB
5121#include <stdio.h>
5122#endif
5123
5124/* #include "zutil.h" */
5125
5126#ifndef NO_DUMMY_DECL
5127struct internal_state      {int dummy;}; /* for buggy compilers */
5128#endif
5129
5130#ifndef STDC
5131extern void exit OF((int));
5132#endif
5133
5134const char *z_errmsg[10] = {
5135"need dictionary",     /* Z_NEED_DICT       2  */
5136"stream end",          /* Z_STREAM_END      1  */
5137"",                    /* Z_OK              0  */
5138"file error",          /* Z_ERRNO         (-1) */
5139"stream error",        /* Z_STREAM_ERROR  (-2) */
5140"data error",          /* Z_DATA_ERROR    (-3) */
5141"insufficient memory", /* Z_MEM_ERROR     (-4) */
5142"buffer error",        /* Z_BUF_ERROR     (-5) */
5143"incompatible version",/* Z_VERSION_ERROR (-6) */
5144""};
5145
5146
5147const char *zlibVersion()
5148{
5149    return ZLIB_VERSION;
5150}
5151
5152#ifdef DEBUG_ZLIB
5153void z_error (m)
5154    char *m;
5155{
5156    fprintf(stderr, "%s\n", m);
5157    exit(1);
5158}
5159#endif
5160
5161#ifndef HAVE_MEMCPY
5162
5163void zmemcpy(dest, source, len)
5164    Bytef* dest;
5165    Bytef* source;
5166    uInt  len;
5167{
5168    if (len == 0) return;
5169    do {
5170        *dest++ = *source++; /* ??? to be unrolled */
5171    } while (--len != 0);
5172}
5173
5174int zmemcmp(s1, s2, len)
5175    Bytef* s1;
5176    Bytef* s2;
5177    uInt  len;
5178{
5179    uInt j;
5180
5181    for (j = 0; j < len; j++) {
5182        if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
5183    }
5184    return 0;
5185}
5186
5187void zmemzero(dest, len)
5188    Bytef* dest;
5189    uInt  len;
5190{
5191    if (len == 0) return;
5192    do {
5193        *dest++ = 0;  /* ??? to be unrolled */
5194    } while (--len != 0);
5195}
5196#endif
5197
5198#ifdef __TURBOC__
5199#if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
5200/* The small and medium memory models in Turbo C are for now limited to near
5201 * allocation with reduced MAX_WBITS and MAX_MEM_LEVEL
5202 */
5203#  define MY_ZCALLOC
5204
5205/* Turbo C malloc() does not allow dynamic allocation of 64K bytes
5206 * and farmalloc(64K) returns a pointer with an offset of 8, so we
5207 * must fix the pointer. Warning: the pointer must be put back to its
5208 * original form in order to free it, use zcfree().
5209 */
5210
5211#define MAX_PTR 10
5212/* 10*64K = 640K */
5213
5214local int next_ptr = 0;
5215
5216typedef struct ptr_table_s {
5217    voidpf org_ptr;
5218    voidpf new_ptr;
5219} ptr_table;
5220
5221local ptr_table table[MAX_PTR];
5222/* This table is used to remember the original form of pointers
5223 * to large buffers (64K). Such pointers are normalized with a zero offset.
5224 * Since MSDOS is not a preemptive multitasking OS, this table is not
5225 * protected from concurrent access. This hack doesn't work anyway on
5226 * a protected system like OS/2. Use Microsoft C instead.
5227 */
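/* A worked example of the normalization done in zcalloc() below (a sketch,
 * not from the original sources): if farmalloc() returns 0x1234:0x0008, the
 * segment is advanced by (0x0008 + 15) >> 4 == 1 and the offset zeroed,
 * giving 0x1235:0x0000 -- the first paragraph (16-byte) boundary at or after
 * the original address.  That rounding is why 16 extra bytes are requested
 * for the >= 64K case.
 */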
5228
5229voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
5230{
5231    voidpf buf = opaque; /* just to make some compilers happy */
5232    ulg bsize = (ulg)items*size;
5233
5234    /* If we allocate less than 65520 bytes, we assume that farmalloc
5235     * will return a usable pointer which doesn't have to be normalized.
5236     */
5237    if (bsize < 65520L) {
5238        buf = farmalloc(bsize);
5239        if (*(ush*)&buf != 0) return buf;
5240    } else {
5241        buf = farmalloc(bsize + 16L);
5242    }
5243    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
5244    table[next_ptr].org_ptr = buf;
5245
5246    /* Normalize the pointer to seg:0 */
5247    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
5248    *(ush*)&buf = 0;
5249    table[next_ptr++].new_ptr = buf;
5250    return buf;
5251}
5252
5253void  zcfree (voidpf opaque, voidpf ptr)
5254{
5255    int n;
5256    if (*(ush*)&ptr != 0) { /* object < 64K */
5257        farfree(ptr);
5258        return;
5259    }
5260    /* Find the original pointer */
5261    for (n = 0; n < next_ptr; n++) {
5262        if (ptr != table[n].new_ptr) continue;
5263
5264        farfree(table[n].org_ptr);
5265        while (++n < next_ptr) {
5266            table[n-1] = table[n];
5267        }
5268        next_ptr--;
5269        return;
5270    }
5271    ptr = opaque; /* just to make some compilers happy */
5272    Assert(0, "zcfree: ptr not found");
5273}
5274#endif
5275#endif /* __TURBOC__ */
5276
5277
5278#if defined(M_I86) && !defined(__32BIT__)
5279/* Microsoft C in 16-bit mode */
5280
5281#  define MY_ZCALLOC
5282
5283#if (!defined(_MSC_VER) || (_MSC_VER < 600))
5284#  define _halloc  halloc
5285#  define _hfree   hfree
5286#endif
5287
5288voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
5289{
5290    if (opaque) opaque = 0; /* to make compiler happy */
5291    return _halloc((long)items, size);
5292}
5293
5294void  zcfree (voidpf opaque, voidpf ptr)
5295{
5296    if (opaque) opaque = 0; /* to make compiler happy */
5297    _hfree(ptr);
5298}
5299
5300#endif /* MSC */
5301
5302
5303#ifndef MY_ZCALLOC /* Any system without a special alloc function */
5304
5305#ifndef STDC
5306extern voidp  calloc OF((uInt items, uInt size));
5307extern void   free   OF((voidpf ptr));
5308#endif
5309
5310voidpf zcalloc (opaque, items, size)
5311    voidpf opaque;
5312    unsigned items;
5313    unsigned size;
5314{
5315    if (opaque) items += size - size; /* make compiler happy */
5316    return (voidpf)calloc(items, size);
5317}
5318
5319void  zcfree (opaque, ptr)
5320    voidpf opaque;
5321    voidpf ptr;
5322{
5323    free(ptr);
5324    if (opaque) return; /* make compiler happy */
5325}
5326
5327#endif /* MY_ZCALLOC */
5328/* --- zutil.c */
5329
5330/* +++ adler32.c */
5331/* adler32.c -- compute the Adler-32 checksum of a data stream
5332 * Copyright (C) 1995-1996 Mark Adler
5333 * For conditions of distribution and use, see copyright notice in zlib.h
5334 */
5335
5336/* From: adler32.c,v 1.10 1996/05/22 11:52:18 me Exp $ */
5337
5338/* #include "zlib.h" */
5339
5340#define BASE 65521L /* largest prime smaller than 65536 */
5341#define NMAX 5552
5342/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
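/* A quick check of that bound: with n == 5552,
 *     255*5552*5553/2 + 5553*65520 == 3930857640 + 363832560 == 4294690200,
 * which is <= 4294967295 == 2^32-1, while n == 5553 already gives
 * 4296171735 and would overflow 32 bits, so 5552 is indeed the largest such n.
 */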
5343
5344#define DO1(buf,i)  {s1 += buf[i]; s2 += s1;}
5345#define DO2(buf,i)  DO1(buf,i); DO1(buf,i+1);
5346#define DO4(buf,i)  DO2(buf,i); DO2(buf,i+2);
5347#define DO8(buf,i)  DO4(buf,i); DO4(buf,i+4);
5348#define DO16(buf)   DO8(buf,0); DO8(buf,8);
5349
5350/* ========================================================================= */
5351uLong adler32(adler, buf, len)
5352    uLong adler;
5353    const Bytef *buf;
5354    uInt len;
5355{
5356    unsigned long s1 = adler & 0xffff;
5357    unsigned long s2 = (adler >> 16) & 0xffff;
5358    int k;
5359
5360    if (buf == Z_NULL) return 1L;
5361
5362    while (len > 0) {
5363        k = len < NMAX ? len : NMAX;
5364        len -= k;
5365        while (k >= 16) {
5366            DO16(buf);
5367            buf += 16;
5368            k -= 16;
5369        }
5370        if (k != 0) do {
5371            s1 += *buf++;
5372            s2 += s1;
5373        } while (--k);
5374        s1 %= BASE;
5375        s2 %= BASE;
5376    }
5377    return (s2 << 16) | s1;
5378}
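/* A minimal usage sketch (hypothetical caller, compiled out): incremental
 * checksums start from the value 1 that the Z_NULL case above returns. */
#if 0
local uLong adler32_example(buf1, len1, buf2, len2)
    const Bytef *buf1;
    uInt len1;
    const Bytef *buf2;
    uInt len2;
{
    uLong a = adler32(0L, Z_NULL, 0);   /* initial value, a == 1 */
    a = adler32(a, buf1, len1);         /* checksum of buf1 */
    a = adler32(a, buf2, len2);         /* checksum of buf1 followed by buf2 */
    return a;
}
#endif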
5379/* --- adler32.c */
5380