zlib.c revision 28989
1/*
2 * This file is derived from various .h and .c files from the zlib-0.95
3 * distribution by Jean-loup Gailly and Mark Adler, with some additions
4 * by Paul Mackerras to aid in implementing Deflate compression and
5 * decompression for PPP packets.  See zlib.h for conditions of
6 * distribution and use.
7 *
8 * Changes that have been made include:
9 * - changed functions not used outside this file to "local"
10 * - added minCompression parameter to deflateInit2
11 * - added Z_PACKET_FLUSH (see zlib.h for details)
12 * - added inflateIncomp
13 *
14 * $Id: zlib.c,v 1.3 1997/08/19 14:10:48 peter Exp $
15 */
16
17/*
18 *  ==FILEVERSION 970501==
19 *
20 * This marker is used by the Linux installation script to determine
21 * whether an up-to-date version of this file is already installed.
22 */
23
24/*+++++*/
25/* zutil.h -- internal interface and configuration of the compression library
26 * Copyright (C) 1995 Jean-loup Gailly.
27 * For conditions of distribution and use, see copyright notice in zlib.h
28 */
29
30/* WARNING: this file should *not* be used by applications. It is
31   part of the implementation of the compression library and is
32   subject to change. Applications should only use zlib.h.
33 */
34
35/* From: zutil.h,v 1.9 1995/05/03 17:27:12 jloup Exp */
36
37#define _Z_UTIL_H
38
39#ifdef KERNEL
40#include <net/zlib.h>
41#else
42#include "zlib.h"
43#endif
44
45#ifndef local
46#  define local static
47#endif
48/* compile with -Dlocal if your debugger can't find static symbols */
49
50#define FAR
51
52typedef unsigned char  uch;
53typedef uch FAR uchf;
54typedef unsigned short ush;
55typedef ush FAR ushf;
56typedef unsigned long  ulg;
57
58extern char *z_errmsg[]; /* indexed by 1-zlib_error */
59
60#define ERR_RETURN(strm,err) return (strm->msg=z_errmsg[1-err], err)
61/* To be used only when the state is known to be valid */
62
63#ifndef NULL
64#define NULL	((void *) 0)
65#endif
66
67        /* common constants */
68
69#define DEFLATED   8
70
71#ifndef DEF_WBITS
72#  define DEF_WBITS MAX_WBITS
73#endif
74/* default windowBits for decompression. MAX_WBITS is for compression only */
75
76#if MAX_MEM_LEVEL >= 8
77#  define DEF_MEM_LEVEL 8
78#else
79#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
80#endif
81/* default memLevel */
82
83#define STORED_BLOCK 0
84#define STATIC_TREES 1
85#define DYN_TREES    2
86/* The three kinds of block type */
87
88#define MIN_MATCH  3
89#define MAX_MATCH  258
90/* The minimum and maximum match lengths */
91
92         /* functions */
93
94#if defined(KERNEL) || defined(_KERNEL)
95#include <sys/types.h>
96#include <sys/systm.h>
97#  define zmemcpy(d, s, n)	bcopy((s), (d), (n))
98#  define zmemzero		bzero
99
100#else
101#if defined(__KERNEL__)
102/* Assume this is Linux */
103#include <linux/string.h>
104#define zmemcpy memcpy
105#define zmemzero(dest, len)	memset(dest, 0, len)
106
107#else /* not kernel */
108#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
109#  define HAVE_MEMCPY
110#endif
111#ifdef HAVE_MEMCPY
112#    define zmemcpy memcpy
113#    define zmemzero(dest, len) memset(dest, 0, len)
114#else
115   extern void zmemcpy  OF((Bytef* dest, Bytef* source, uInt len));
116   extern void zmemzero OF((Bytef* dest, uInt len));
117#endif
118#endif	/* __KERNEL__ */
119#endif	/* KERNEL */
120
121/* Diagnostic functions */
122#ifdef DEBUG_ZLIB
123#  include <stdio.h>
124#  ifndef verbose
125#    define verbose 0
126#  endif
127#  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
128#  define Trace(x) fprintf x
129#  define Tracev(x) {if (verbose) fprintf x ;}
130#  define Tracevv(x) {if (verbose>1) fprintf x ;}
131#  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
132#  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
133#else
134#  define Assert(cond,msg)
135#  define Trace(x)
136#  define Tracev(x)
137#  define Tracevv(x)
138#  define Tracec(c,x)
139#  define Tracecv(c,x)
140#endif
141
142
143typedef uLong (*check_func) OF((uLong check, Bytef *buf, uInt len));
144
145/* voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size)); */
146/* void   zcfree  OF((voidpf opaque, voidpf ptr)); */
147
148#define ZALLOC(strm, items, size) \
149           (*((strm)->zalloc))((strm)->opaque, (items), (size))
150#define ZALLOC_INIT(strm, items, size) \
151           (*((strm)->zalloc_init))((strm)->opaque, (items), (size))
152#define ZFREE(strm, addr, size)	\
153	   (*((strm)->zfree))((strm)->opaque, (voidpf)(addr), (size))
154#define TRY_FREE(s, p, n) {if (p) ZFREE(s, p, n);}
155
156/* deflate.h -- internal compression state
157 * Copyright (C) 1995 Jean-loup Gailly
158 * For conditions of distribution and use, see copyright notice in zlib.h
159 */
160
161/* WARNING: this file should *not* be used by applications. It is
162   part of the implementation of the compression library and is
163   subject to change. Applications should only use zlib.h.
164 */
165
166
167/*+++++*/
168/* From: deflate.h,v 1.5 1995/05/03 17:27:09 jloup Exp */
169
170/* ===========================================================================
171 * Internal compression state.
172 */
173
174/* Data type */
175#define Z_BINARY  0
176#define ASCII   1
177#define UNKNOWN 2
178
179#define LENGTH_CODES 29
180/* number of length codes, not counting the special END_BLOCK code */
181
182#define LITERALS  256
183/* number of literal bytes 0..255 */
184
185#define L_CODES (LITERALS+1+LENGTH_CODES)
186/* number of Literal or Length codes, including the END_BLOCK code */
187
188#define D_CODES   30
189/* number of distance codes */
190
191#define BL_CODES  19
192/* number of codes used to transfer the bit lengths */
193
194#define HEAP_SIZE (2*L_CODES+1)
195/* maximum heap size */
196
197#define MAX_BITS 15
198/* All codes must not exceed MAX_BITS bits */
199
200#define INIT_STATE    42
201#define BUSY_STATE   113
202#define FLUSH_STATE  124
203#define FINISH_STATE 666
204/* Stream status */
205
206
207/* Data structure describing a single value and its code string. */
208typedef struct ct_data_s {
209    union {
210        ush  freq;       /* frequency count */
211        ush  code;       /* bit string */
212    } fc;
213    union {
214        ush  dad;        /* father node in Huffman tree */
215        ush  len;        /* length of bit string */
216    } dl;
217} FAR ct_data;
218
219#define Freq fc.freq
220#define Code fc.code
221#define Dad  dl.dad
222#define Len  dl.len
223
224typedef struct static_tree_desc_s  static_tree_desc;
225
226typedef struct tree_desc_s {
227    ct_data *dyn_tree;           /* the dynamic tree */
228    int     max_code;            /* largest code with non zero frequency */
229    static_tree_desc *stat_desc; /* the corresponding static tree */
230} FAR tree_desc;
231
232typedef ush Pos;
233typedef Pos FAR Posf;
234typedef unsigned IPos;
235
236/* A Pos is an index in the character window. We use short instead of int to
237 * save space in the various tables. IPos is used only for parameter passing.
238 */
239
240typedef struct deflate_state {
241    z_stream *strm;      /* pointer back to this zlib stream */
242    int   status;        /* as the name implies */
243    Bytef *pending_buf;  /* output still pending */
244    Bytef *pending_out;  /* next pending byte to output to the stream */
245    int   pending;       /* nb of bytes in the pending buffer */
246    uLong adler;         /* adler32 of uncompressed data */
247    int   noheader;      /* suppress zlib header and adler32 */
248    Byte  data_type;     /* UNKNOWN, Z_BINARY or ASCII */
249    Byte  method;        /* STORED (for zip only) or DEFLATED */
250    int	  minCompr;	 /* min size decrease for Z_FLUSH_NOSTORE */
251
252                /* used by deflate.c: */
253
254    uInt  w_size;        /* LZ77 window size (32K by default) */
255    uInt  w_bits;        /* log2(w_size)  (8..16) */
256    uInt  w_mask;        /* w_size - 1 */
257
258    Bytef *window;
259    /* Sliding window. Input bytes are read into the second half of the window,
260     * and move to the first half later to keep a dictionary of at least wSize
261     * bytes. With this organization, matches are limited to a distance of
262     * wSize-MAX_MATCH bytes, but this ensures that IO is always
263     * performed with a length multiple of the block size. Also, it limits
264     * the window size to 64K, which is quite useful on MSDOS.
265     * To do: use the user input buffer as sliding window.
266     */
267
268    ulg window_size;
269    /* Actual size of window: 2*wSize, except when the user input buffer
270     * is directly used as sliding window.
271     */
272
273    Posf *prev;
274    /* Link to older string with same hash index. To limit the size of this
275     * array to 64K, this link is maintained only for the last 32K strings.
276     * An index in this array is thus a window index modulo 32K.
277     */
278
279    Posf *head; /* Heads of the hash chains or NIL. */
280
281    uInt  ins_h;          /* hash index of string to be inserted */
282    uInt  hash_size;      /* number of elements in hash table */
283    uInt  hash_bits;      /* log2(hash_size) */
284    uInt  hash_mask;      /* hash_size-1 */
285
286    uInt  hash_shift;
287    /* Number of bits by which ins_h must be shifted at each input
288     * step. It must be such that after MIN_MATCH steps, the oldest
289     * byte no longer takes part in the hash key, that is:
290     *   hash_shift * MIN_MATCH >= hash_bits
291     */
292
293    long block_start;
294    /* Window position at the beginning of the current output block. Gets
295     * negative when the window is moved backwards.
296     */
297
298    uInt match_length;           /* length of best match */
299    IPos prev_match;             /* previous match */
300    int match_available;         /* set if previous match exists */
301    uInt strstart;               /* start of string to insert */
302    uInt match_start;            /* start of matching string */
303    uInt lookahead;              /* number of valid bytes ahead in window */
304
305    uInt prev_length;
306    /* Length of the best match at previous step. Matches not greater than this
307     * are discarded. This is used in the lazy match evaluation.
308     */
309
310    uInt max_chain_length;
311    /* To speed up deflation, hash chains are never searched beyond this
312     * length.  A higher limit improves compression ratio but degrades the
313     * speed.
314     */
315
316    uInt max_lazy_match;
317    /* Attempt to find a better match only when the current match is strictly
318     * smaller than this value. This mechanism is used only for compression
319     * levels >= 4.
320     */
321#   define max_insert_length  max_lazy_match
322    /* Insert new strings in the hash table only if the match length is not
323     * greater than this length. This saves time but degrades compression.
324     * max_insert_length is used only for compression levels <= 3.
325     */
326
327    int level;    /* compression level (1..9) */
328    int strategy; /* favor or force Huffman coding*/
329
330    uInt good_match;
331    /* Use a faster search when the previous match is longer than this */
332
333     int nice_match; /* Stop searching when current match exceeds this */
334
335                /* used by trees.c: */
336    /* Didn't use ct_data typedef below to supress compiler warning */
337    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
338    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
339    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */
340
341    struct tree_desc_s l_desc;               /* desc. for literal tree */
342    struct tree_desc_s d_desc;               /* desc. for distance tree */
343    struct tree_desc_s bl_desc;              /* desc. for bit length tree */
344
345    ush bl_count[MAX_BITS+1];
346    /* number of codes at each bit length for an optimal tree */
347
348    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
349    int heap_len;               /* number of elements in the heap */
350    int heap_max;               /* element of largest frequency */
351    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
352     * The same heap array is used to build all trees.
353     */
354
355    uch depth[2*L_CODES+1];
356    /* Depth of each subtree used as tie breaker for trees of equal frequency
357     */
358
359    uchf *l_buf;          /* buffer for literals or lengths */
360
361    uInt  lit_bufsize;
362    /* Size of match buffer for literals/lengths.  There are 4 reasons for
363     * limiting lit_bufsize to 64K:
364     *   - frequencies can be kept in 16 bit counters
365     *   - if compression is not successful for the first block, all input
366     *     data is still in the window so we can still emit a stored block even
367     *     when input comes from standard input.  (This can also be done for
368     *     all blocks if lit_bufsize is not greater than 32K.)
369     *   - if compression is not successful for a file smaller than 64K, we can
370     *     even emit a stored file instead of a stored block (saving 5 bytes).
371     *     This is applicable only for zip (not gzip or zlib).
372     *   - creating new Huffman trees less frequently may not provide fast
373     *     adaptation to changes in the input data statistics. (Take for
374     *     example a binary file with poorly compressible code followed by
375     *     a highly compressible string table.) Smaller buffer sizes give
376     *     fast adaptation but have of course the overhead of transmitting
377     *     trees more frequently.
378     *   - I can't count above 4
379     */
380
381    uInt last_lit;      /* running index in l_buf */
382
383    ushf *d_buf;
384    /* Buffer for distances. To simplify the code, d_buf and l_buf have
385     * the same number of elements. To use different lengths, an extra flag
386     * array would be necessary.
387     */
388
389    ulg opt_len;        /* bit length of current block with optimal trees */
390    ulg static_len;     /* bit length of current block with static trees */
391    ulg compressed_len; /* total bit length of compressed file */
392    uInt matches;       /* number of string matches in current block */
393    int last_eob_len;   /* bit length of EOB code for last block */
394
395#ifdef DEBUG_ZLIB
396    ulg bits_sent;      /* bit length of the compressed data */
397#endif
398
399    ush bi_buf;
400    /* Output buffer. bits are inserted starting at the bottom (least
401     * significant bits).
402     */
403    int bi_valid;
404    /* Number of valid bits in bi_buf.  All bits above the last valid bit
405     * are always zero.
406     */
407
408    uInt blocks_in_packet;
409    /* Number of blocks produced since the last time Z_PACKET_FLUSH
410     * was used.
411     */
412
413} FAR deflate_state;
414
415/* Output a byte on the stream.
416 * IN assertion: there is enough room in pending_buf.
417 */
418#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
419
420
421#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
422/* Minimum amount of lookahead, except at the end of the input file.
423 * See deflate.c for comments about the MIN_MATCH+1.
424 */
425
426#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
427/* In order to simplify the code, particularly on 16 bit machines, match
428 * distances are limited to MAX_DIST instead of WSIZE.
429 */
430
431        /* in trees.c */
432local void ct_init       OF((deflate_state *s));
433local int  ct_tally      OF((deflate_state *s, int dist, int lc));
434local ulg ct_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,
435			     int flush));
436local void ct_align      OF((deflate_state *s));
437local void ct_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
438                          int eof));
439local void ct_stored_type_only OF((deflate_state *s));
440
441
442/*+++++*/
443/* deflate.c -- compress data using the deflation algorithm
444 * Copyright (C) 1995 Jean-loup Gailly.
445 * For conditions of distribution and use, see copyright notice in zlib.h
446 */
447
448/*
449 *  ALGORITHM
450 *
451 *      The "deflation" process depends on being able to identify portions
452 *      of the input text which are identical to earlier input (within a
453 *      sliding window trailing behind the input currently being processed).
454 *
455 *      The most straightforward technique turns out to be the fastest for
456 *      most input files: try all possible matches and select the longest.
457 *      The key feature of this algorithm is that insertions into the string
458 *      dictionary are very simple and thus fast, and deletions are avoided
459 *      completely. Insertions are performed at each input character, whereas
460 *      string matches are performed only when the previous match ends. So it
461 *      is preferable to spend more time in matches to allow very fast string
462 *      insertions and avoid deletions. The matching algorithm for small
463 *      strings is inspired from that of Rabin & Karp. A brute force approach
464 *      is used to find longer strings when a small match has been found.
465 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
466 *      (by Leonid Broukhis).
467 *         A previous version of this file used a more sophisticated algorithm
468 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
469 *      time, but has a larger average cost, uses more memory and is patented.
470 *      However the F&G algorithm may be faster for some highly redundant
471 *      files if the parameter max_chain_length (described below) is too large.
472 *
473 *  ACKNOWLEDGEMENTS
474 *
475 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
476 *      I found it in 'freeze' written by Leonid Broukhis.
477 *      Thanks to many people for bug reports and testing.
478 *
479 *  REFERENCES
480 *
481 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
482 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
483 *
484 *      A description of the Rabin and Karp algorithm is given in the book
485 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
486 *
487 *      Fiala,E.R., and Greene,D.H.
488 *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
489 *
490 */
491
492/* From: deflate.c,v 1.8 1995/05/03 17:27:08 jloup Exp */
493
494local char zlib_copyright[] = " deflate Copyright 1995 Jean-loup Gailly ";
495/*
496  If you use the zlib library in a product, an acknowledgment is welcome
497  in the documentation of your product. If for some reason you cannot
498  include such an acknowledgment, I would appreciate that you keep this
499  copyright string in the executable of your product.
500 */
501
502#define NIL 0
503/* Tail of hash chains */
504
505#ifndef TOO_FAR
506#  define TOO_FAR 4096
507#endif
508/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
509
510#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
511/* Minimum amount of lookahead, except at the end of the input file.
512 * See deflate.c for comments about the MIN_MATCH+1.
513 */
514
515/* Values for max_lazy_match, good_match and max_chain_length, depending on
516 * the desired pack level (0..9). The values given below have been tuned to
517 * exclude worst case performance for pathological files. Better values may be
518 * found for specific files.
519 */
520
521typedef struct config_s {
522   ush good_length; /* reduce lazy search above this match length */
523   ush max_lazy;    /* do not perform lazy search above this match length */
524   ush nice_length; /* quit search above this match length */
525   ush max_chain;
526} config;
527
528local config configuration_table[10] = {
529/*      good lazy nice chain */
530/* 0 */ {0,    0,  0,    0},  /* store only */
531/* 1 */ {4,    4,  8,    4},  /* maximum speed, no lazy matches */
532/* 2 */ {4,    5, 16,    8},
533/* 3 */ {4,    6, 32,   32},
534
535/* 4 */ {4,    4, 16,   16},  /* lazy matches */
536/* 5 */ {8,   16, 32,   32},
537/* 6 */ {8,   16, 128, 128},
538/* 7 */ {8,   32, 128, 256},
539/* 8 */ {32, 128, 258, 1024},
540/* 9 */ {32, 258, 258, 4096}}; /* maximum compression */
541
542/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
543 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
544 * meaning.
545 */
546
547#define EQUAL 0
548/* result of memcmp for equal strings */
549
550/* ===========================================================================
551 *  Prototypes for local functions.
552 */
553
554local void fill_window   OF((deflate_state *s));
555local int  deflate_fast  OF((deflate_state *s, int flush));
556local int  deflate_slow  OF((deflate_state *s, int flush));
557local void lm_init       OF((deflate_state *s));
558local int longest_match  OF((deflate_state *s, IPos cur_match));
559local void putShortMSB   OF((deflate_state *s, uInt b));
560local void flush_pending OF((z_stream *strm));
561local int read_buf       OF((z_stream *strm, charf *buf, unsigned size));
562#ifdef ASMV
563      void match_init OF((void)); /* asm code initialization */
564#endif
565
566#ifdef DEBUG_ZLIB
567local  void check_match OF((deflate_state *s, IPos start, IPos match,
568                            int length));
569#endif
570
571
572/* ===========================================================================
573 * Update a hash value with the given input byte
574 * IN  assertion: all calls to to UPDATE_HASH are made with consecutive
575 *    input characters, so that a running hash key can be computed from the
576 *    previous key instead of complete recalculation each time.
577 */
578#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
579
580
581/* ===========================================================================
582 * Insert string str in the dictionary and set match_head to the previous head
583 * of the hash chain (the most recent string with same hash key). Return
584 * the previous length of the hash chain.
585 * IN  assertion: all calls to to INSERT_STRING are made with consecutive
586 *    input characters and the first MIN_MATCH bytes of str are valid
587 *    (except for the last MIN_MATCH-1 bytes of the input file).
588 */
589#define INSERT_STRING(s, str, match_head) \
590   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
591    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
592    s->head[s->ins_h] = (str))
593
594/* ===========================================================================
595 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
596 * prev[] will be initialized on the fly.
597 */
598#define CLEAR_HASH(s) \
599    s->head[s->hash_size-1] = NIL; \
600    zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
601
602/* ========================================================================= */
603int deflateInit (strm, level)
604    z_stream *strm;
605    int level;
606{
607    return deflateInit2 (strm, level, DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
608			 0, 0);
609    /* To do: ignore strm->next_in if we use it as window */
610}
611
612/* ========================================================================= */
613int deflateInit2 (strm, level, method, windowBits, memLevel,
614		  strategy, minCompression)
615    z_stream *strm;
616    int  level;
617    int  method;
618    int  windowBits;
619    int  memLevel;
620    int  strategy;
621    int  minCompression;
622{
623    deflate_state *s;
624    int noheader = 0;
625
626    if (strm == Z_NULL) return Z_STREAM_ERROR;
627
628    strm->msg = Z_NULL;
629/*    if (strm->zalloc == Z_NULL) strm->zalloc = zcalloc; */
630/*    if (strm->zfree == Z_NULL) strm->zfree = zcfree; */
631
632    if (level == Z_DEFAULT_COMPRESSION) level = 6;
633
634    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
635        noheader = 1;
636        windowBits = -windowBits;
637    }
638    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != DEFLATED ||
639        windowBits < 8 || windowBits > 15 || level < 1 || level > 9) {
640        return Z_STREAM_ERROR;
641    }
642    s = (deflate_state *) ZALLOC_INIT(strm, 1, sizeof(deflate_state));
643    if (s == Z_NULL) return Z_MEM_ERROR;
644    strm->state = (struct internal_state FAR *)s;
645    s->strm = strm;
646
647    s->noheader = noheader;
648    s->w_bits = windowBits;
649    s->w_size = 1 << s->w_bits;
650    s->w_mask = s->w_size - 1;
651
652    s->hash_bits = memLevel + 7;
653    s->hash_size = 1 << s->hash_bits;
654    s->hash_mask = s->hash_size - 1;
655    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);
656
657    s->window = (Bytef *) ZALLOC_INIT(strm, s->w_size, 2*sizeof(Byte));
658    s->prev   = (Posf *)  ZALLOC_INIT(strm, s->w_size, sizeof(Pos));
659    s->head   = (Posf *)  ZALLOC_INIT(strm, s->hash_size, sizeof(Pos));
660
661    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
662
663    s->pending_buf = (uchf *) ZALLOC_INIT(strm, s->lit_bufsize, 2*sizeof(ush));
664
665    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
666        s->pending_buf == Z_NULL) {
667        strm->msg = z_errmsg[1-Z_MEM_ERROR];
668        deflateEnd (strm);
669        return Z_MEM_ERROR;
670    }
671    s->d_buf = (ushf *) &(s->pending_buf[s->lit_bufsize]);
672    s->l_buf = (uchf *) &(s->pending_buf[3*s->lit_bufsize]);
673    /* We overlay pending_buf and d_buf+l_buf. This works since the average
674     * output size for (length,distance) codes is <= 32 bits (worst case
675     * is 15+15+13=33).
676     */
677
678    s->level = level;
679    s->strategy = strategy;
680    s->method = (Byte)method;
681    s->minCompr = minCompression;
682    s->blocks_in_packet = 0;
683
684    return deflateReset(strm);
685}
686
687/* ========================================================================= */
688int deflateReset (strm)
689    z_stream *strm;
690{
691    deflate_state *s;
692
693    if (strm == Z_NULL || strm->state == Z_NULL ||
694        strm->zalloc == Z_NULL || strm->zfree == Z_NULL ||
695	strm->zalloc_init == Z_NULL) return Z_STREAM_ERROR;
696
697    strm->total_in = strm->total_out = 0;
698    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
699    strm->data_type = Z_UNKNOWN;
700
701    s = (deflate_state *)strm->state;
702    s->pending = 0;
703    s->pending_out = s->pending_buf;
704
705    if (s->noheader < 0) {
706        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
707    }
708    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
709    s->adler = 1;
710
711    ct_init(s);
712    lm_init(s);
713
714    return Z_OK;
715}
716
717/* =========================================================================
718 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
719 * IN assertion: the stream state is correct and there is enough room in
720 * pending_buf.
721 */
722local void putShortMSB (s, b)
723    deflate_state *s;
724    uInt b;
725{
726    put_byte(s, (Byte)(b >> 8));
727    put_byte(s, (Byte)(b & 0xff));
728}
729
730/* =========================================================================
731 * Flush as much pending output as possible.
732 */
733local void flush_pending(strm)
734    z_stream *strm;
735{
736    deflate_state *state = (deflate_state *) strm->state;
737    unsigned len = state->pending;
738
739    if (len > strm->avail_out) len = strm->avail_out;
740    if (len == 0) return;
741
742    if (strm->next_out != NULL) {
743	zmemcpy(strm->next_out, state->pending_out, len);
744	strm->next_out += len;
745    }
746    state->pending_out += len;
747    strm->total_out += len;
748    strm->avail_out -= len;
749    state->pending -= len;
750    if (state->pending == 0) {
751        state->pending_out = state->pending_buf;
752    }
753}
754
755/* ========================================================================= */
756int deflate (strm, flush)
757    z_stream *strm;
758    int flush;
759{
760    deflate_state *state = (deflate_state *) strm->state;
761
762    if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
763
764    if (strm->next_in == Z_NULL && strm->avail_in != 0) {
765        ERR_RETURN(strm, Z_STREAM_ERROR);
766    }
767    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
768
769    state->strm = strm; /* just in case */
770
771    /* Write the zlib header */
772    if (state->status == INIT_STATE) {
773
774        uInt header = (DEFLATED + ((state->w_bits-8)<<4)) << 8;
775        uInt level_flags = (state->level-1) >> 1;
776
777        if (level_flags > 3) level_flags = 3;
778        header |= (level_flags << 6);
779        header += 31 - (header % 31);
780
781        state->status = BUSY_STATE;
782        putShortMSB(state, header);
783    }
784
785    /* Flush as much pending output as possible */
786    if (state->pending != 0) {
787        flush_pending(strm);
788        if (strm->avail_out == 0) return Z_OK;
789    }
790
791    /* If we came back in here to get the last output from
792     * a previous flush, we're done for now.
793     */
794    if (state->status == FLUSH_STATE) {
795	state->status = BUSY_STATE;
796	if (flush != Z_NO_FLUSH && flush != Z_FINISH)
797	    return Z_OK;
798    }
799
800    /* User must not provide more input after the first FINISH: */
801    if (state->status == FINISH_STATE && strm->avail_in != 0) {
802        ERR_RETURN(strm, Z_BUF_ERROR);
803    }
804
805    /* Start a new block or continue the current one.
806     */
807    if (strm->avail_in != 0 || state->lookahead != 0 ||
808        (flush == Z_FINISH && state->status != FINISH_STATE)) {
809        int quit;
810
811        if (flush == Z_FINISH) {
812            state->status = FINISH_STATE;
813        }
814        if (state->level <= 3) {
815            quit = deflate_fast(state, flush);
816        } else {
817            quit = deflate_slow(state, flush);
818        }
819        if (quit || strm->avail_out == 0)
820	    return Z_OK;
821        /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
822         * of deflate should use the same flush parameter to make sure
823         * that the flush is complete. So we don't have to output an
824         * empty block here, this will be done at next call. This also
825         * ensures that for a very small output buffer, we emit at most
826         * one empty block.
827         */
828    }
829
830    /* If a flush was requested, we have a little more to output now. */
831    if (flush != Z_NO_FLUSH && flush != Z_FINISH
832	&& state->status != FINISH_STATE) {
833	switch (flush) {
834	case Z_PARTIAL_FLUSH:
835	    ct_align(state);
836	    break;
837	case Z_PACKET_FLUSH:
838	    /* Output just the 3-bit `stored' block type value,
839	       but not a zero length. */
840	    ct_stored_type_only(state);
841	    break;
842	default:
843	    ct_stored_block(state, (char*)0, 0L, 0);
844	    /* For a full flush, this empty block will be recognized
845	     * as a special marker by inflate_sync().
846	     */
847	    if (flush == Z_FULL_FLUSH) {
848		CLEAR_HASH(state);             /* forget history */
849	    }
850	}
851	flush_pending(strm);
852	if (strm->avail_out == 0) {
853	    /* We'll have to come back to get the rest of the output;
854	     * this ensures we don't output a second zero-length stored
855	     * block (or whatever).
856	     */
857	    state->status = FLUSH_STATE;
858	    return Z_OK;
859	}
860    }
861
862    Assert(strm->avail_out > 0, "bug2");
863
864    if (flush != Z_FINISH) return Z_OK;
865    if (state->noheader) return Z_STREAM_END;
866
867    /* Write the zlib trailer (adler32) */
868    putShortMSB(state, (uInt)(state->adler >> 16));
869    putShortMSB(state, (uInt)(state->adler & 0xffff));
870    flush_pending(strm);
871    /* If avail_out is zero, the application will call deflate again
872     * to flush the rest.
873     */
874    state->noheader = -1; /* write the trailer only once! */
875    return state->pending != 0 ? Z_OK : Z_STREAM_END;
876}
877
878/* ========================================================================= */
879int deflateEnd (strm)
880    z_stream *strm;
881{
882    deflate_state *state = (deflate_state *) strm->state;
883
884    if (strm == Z_NULL || state == Z_NULL) return Z_STREAM_ERROR;
885
886    TRY_FREE(strm, state->window, state->w_size * 2 * sizeof(Byte));
887    TRY_FREE(strm, state->prev, state->w_size * sizeof(Pos));
888    TRY_FREE(strm, state->head, state->hash_size * sizeof(Pos));
889    TRY_FREE(strm, state->pending_buf, state->lit_bufsize * 2 * sizeof(ush));
890
891    ZFREE(strm, state, sizeof(deflate_state));
892    strm->state = Z_NULL;
893
894    return Z_OK;
895}
896
897/* ===========================================================================
898 * Read a new buffer from the current input stream, update the adler32
899 * and total number of bytes read.
900 */
901local int read_buf(strm, buf, size)
902    z_stream *strm;
903    charf *buf;
904    unsigned size;
905{
906    unsigned len = strm->avail_in;
907    deflate_state *state = (deflate_state *) strm->state;
908
909    if (len > size) len = size;
910    if (len == 0) return 0;
911
912    strm->avail_in  -= len;
913
914    if (!state->noheader) {
915        state->adler = adler32(state->adler, strm->next_in, len);
916    }
917    zmemcpy(buf, strm->next_in, len);
918    strm->next_in  += len;
919    strm->total_in += len;
920
921    return (int)len;
922}
923
924/* ===========================================================================
925 * Initialize the "longest match" routines for a new zlib stream
926 */
927local void lm_init (s)
928    deflate_state *s;
929{
930    s->window_size = (ulg)2L*s->w_size;
931
932    CLEAR_HASH(s);
933
934    /* Set the default configuration parameters:
935     */
936    s->max_lazy_match   = configuration_table[s->level].max_lazy;
937    s->good_match       = configuration_table[s->level].good_length;
938    s->nice_match       = configuration_table[s->level].nice_length;
939    s->max_chain_length = configuration_table[s->level].max_chain;
940
941    s->strstart = 0;
942    s->block_start = 0L;
943    s->lookahead = 0;
944    s->match_length = MIN_MATCH-1;
945    s->match_available = 0;
946    s->ins_h = 0;
947#ifdef ASMV
948    match_init(); /* initialize the asm code */
949#endif
950}
951
952/* ===========================================================================
953 * Set match_start to the longest match starting at the given string and
954 * return its length. Matches shorter or equal to prev_length are discarded,
955 * in which case the result is equal to prev_length and match_start is
956 * garbage.
957 * IN assertions: cur_match is the head of the hash chain for the current
958 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
959 */
960#ifndef ASMV
961/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
962 * match.S. The code will be functionally equivalent.
963 */
964local int longest_match(s, cur_match)
965    deflate_state *s;
966    IPos cur_match;                             /* current match */
967{
968    unsigned chain_length = s->max_chain_length;/* max hash chain length */
969    register Bytef *scan = s->window + s->strstart; /* current string */
970    register Bytef *match;                       /* matched string */
971    register int len;                           /* length of current match */
972    int best_len = s->prev_length;              /* best match length so far */
973    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
974        s->strstart - (IPos)MAX_DIST(s) : NIL;
975    /* Stop when cur_match becomes <= limit. To simplify the code,
976     * we prevent matches with the string of window index 0.
977     */
978    Posf *prev = s->prev;
979    uInt wmask = s->w_mask;
980
981#ifdef UNALIGNED_OK
982    /* Compare two bytes at a time. Note: this is not always beneficial.
983     * Try with and without -DUNALIGNED_OK to check.
984     */
985    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
986    register ush scan_start = *(ushf*)scan;
987    register ush scan_end   = *(ushf*)(scan+best_len-1);
988#else
989    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
990    register Byte scan_end1  = scan[best_len-1];
991    register Byte scan_end   = scan[best_len];
992#endif
993
994    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
995     * It is easy to get rid of this optimization if necessary.
996     */
997    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
998
999    /* Do not waste too much time if we already have a good match: */
1000    if (s->prev_length >= s->good_match) {
1001        chain_length >>= 2;
1002    }
1003    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
1004
1005    do {
1006        Assert(cur_match < s->strstart, "no future");
1007        match = s->window + cur_match;
1008
1009        /* Skip to next match if the match length cannot increase
1010         * or if the match length is less than 2:
1011         */
1012#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
1013        /* This code assumes sizeof(unsigned short) == 2. Do not use
1014         * UNALIGNED_OK if your compiler uses a different size.
1015         */
1016        if (*(ushf*)(match+best_len-1) != scan_end ||
1017            *(ushf*)match != scan_start) continue;
1018
1019        /* It is not necessary to compare scan[2] and match[2] since they are
1020         * always equal when the other bytes match, given that the hash keys
1021         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
1022         * strstart+3, +5, ... up to strstart+257. We check for insufficient
1023         * lookahead only every 4th comparison; the 128th check will be made
1024         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
1025         * necessary to put more guard bytes at the end of the window, or
1026         * to check more often for insufficient lookahead.
1027         */
1028        Assert(scan[2] == match[2], "scan[2]?");
1029        scan++, match++;
1030        do {
1031        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1032                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1033                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1034                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
1035                 scan < strend);
1036        /* The funny "do {}" generates better code on most compilers */
1037
1038        /* Here, scan <= window+strstart+257 */
1039        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1040        if (*scan == *match) scan++;
1041
1042        len = (MAX_MATCH - 1) - (int)(strend-scan);
1043        scan = strend - (MAX_MATCH-1);
1044
1045#else /* UNALIGNED_OK */
1046
1047        if (match[best_len]   != scan_end  ||
1048            match[best_len-1] != scan_end1 ||
1049            *match            != *scan     ||
1050            *++match          != scan[1])      continue;
1051
1052        /* The check at best_len-1 can be removed because it will be made
1053         * again later. (This heuristic is not always a win.)
1054         * It is not necessary to compare scan[2] and match[2] since they
1055         * are always equal when the other bytes match, given that
1056         * the hash keys are equal and that HASH_BITS >= 8.
1057         */
1058        scan += 2, match++;
1059        Assert(*scan == *match, "match[2]?");
1060
1061        /* We check for insufficient lookahead only every 8th comparison;
1062         * the 256th check will be made at strstart+258.
1063         */
1064        do {
1065        } while (*++scan == *++match && *++scan == *++match &&
1066                 *++scan == *++match && *++scan == *++match &&
1067                 *++scan == *++match && *++scan == *++match &&
1068                 *++scan == *++match && *++scan == *++match &&
1069                 scan < strend);
1070
1071        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
1072
1073        len = MAX_MATCH - (int)(strend - scan);
1074        scan = strend - MAX_MATCH;
1075
1076#endif /* UNALIGNED_OK */
1077
1078        if (len > best_len) {
1079            s->match_start = cur_match;
1080            best_len = len;
1081            if (len >= s->nice_match) break;
1082#ifdef UNALIGNED_OK
1083            scan_end = *(ushf*)(scan+best_len-1);
1084#else
1085            scan_end1  = scan[best_len-1];
1086            scan_end   = scan[best_len];
1087#endif
1088        }
1089    } while ((cur_match = prev[cur_match & wmask]) > limit
1090             && --chain_length != 0);
1091
1092    return best_len;
1093}
1094#endif /* ASMV */
1095
1096#ifdef DEBUG_ZLIB
1097/* ===========================================================================
1098 * Check that the match at match_start is indeed a match.
1099 */
1100local void check_match(s, start, match, length)
1101    deflate_state *s;
1102    IPos start, match;
1103    int length;
1104{
1105    /* check that the match is indeed a match */
1106    if (memcmp((charf *)s->window + match,
1107                (charf *)s->window + start, length) != EQUAL) {
1108        fprintf(stderr,
1109            " start %u, match %u, length %d\n",
1110            start, match, length);
1111        do { fprintf(stderr, "%c%c", s->window[match++],
1112                     s->window[start++]); } while (--length != 0);
1113        z_error("invalid match");
1114    }
1115    if (verbose > 1) {
1116        fprintf(stderr,"\\[%d,%d]", start-match, length);
1117        do { putc(s->window[start++], stderr); } while (--length != 0);
1118    }
1119}
1120#else
1121#  define check_match(s, start, match, length)
1122#endif
1123
1124/* ===========================================================================
1125 * Fill the window when the lookahead becomes insufficient.
1126 * Updates strstart and lookahead.
1127 *
1128 * IN assertion: lookahead < MIN_LOOKAHEAD
1129 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
1130 *    At least one byte has been read, or avail_in == 0; reads are
1131 *    performed for at least two bytes (required for the zip translate_eol
1132 *    option -- not supported here).
1133 */
1134local void fill_window(s)
1135    deflate_state *s;
1136{
1137    register unsigned n, m;
1138    register Posf *p;
1139    unsigned more;    /* Amount of free space at the end of the window. */
1140    uInt wsize = s->w_size;
1141
1142    do {
1143        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
1144
1145        /* Deal with !@#$% 64K limit: */
1146        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
1147            more = wsize;
1148        } else if (more == (unsigned)(-1)) {
1149            /* Very unlikely, but possible on 16 bit machine if strstart == 0
1150             * and lookahead == 1 (input done one byte at time)
1151             */
1152            more--;
1153
1154        /* If the window is almost full and there is insufficient lookahead,
1155         * move the upper half to the lower one to make room in the upper half.
1156         */
1157        } else if (s->strstart >= wsize+MAX_DIST(s)) {
1158
1159            /* By the IN assertion, the window is not empty so we can't confuse
1160             * more == 0 with more == 64K on a 16 bit machine.
1161             */
1162            zmemcpy((charf *)s->window, (charf *)s->window+wsize,
1163                   (unsigned)wsize);
1164            s->match_start -= wsize;
1165            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
1166
1167            s->block_start -= (long) wsize;
1168
1169            /* Slide the hash table (could be avoided with 32 bit values
1170               at the expense of memory usage):
1171             */
1172            n = s->hash_size;
1173            p = &s->head[n];
1174            do {
1175                m = *--p;
1176                *p = (Pos)(m >= wsize ? m-wsize : NIL);
1177            } while (--n);
1178
1179            n = wsize;
1180            p = &s->prev[n];
1181            do {
1182                m = *--p;
1183                *p = (Pos)(m >= wsize ? m-wsize : NIL);
1184                /* If n is not on any hash chain, prev[n] is garbage but
1185                 * its value will never be used.
1186                 */
1187            } while (--n);
1188
1189            more += wsize;
1190        }
1191        if (s->strm->avail_in == 0) return;
1192
1193        /* If there was no sliding:
1194         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
1195         *    more == window_size - lookahead - strstart
1196         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
1197         * => more >= window_size - 2*WSIZE + 2
1198         * In the BIG_MEM or MMAP case (not yet supported),
1199         *   window_size == input_size + MIN_LOOKAHEAD  &&
1200         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
1201         * Otherwise, window_size == 2*WSIZE so more >= 2.
1202         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
1203         */
1204        Assert(more >= 2, "more < 2");
1205
1206        n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
1207                     more);
1208        s->lookahead += n;
1209
1210        /* Initialize the hash value now that we have some input: */
1211        if (s->lookahead >= MIN_MATCH) {
1212            s->ins_h = s->window[s->strstart];
1213            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1214#if MIN_MATCH != 3
1215            Call UPDATE_HASH() MIN_MATCH-3 more times
1216#endif
1217        }
1218        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
1219         * but this is not important since only literal bytes will be emitted.
1220         */
1221
1222    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
1223}
1224
1225/* ===========================================================================
1226 * Flush the current block, with given end-of-file flag.
1227 * IN assertion: strstart is set to the end of the current match.
1228 */
1229#define FLUSH_BLOCK_ONLY(s, flush) { \
1230   ct_flush_block(s, (s->block_start >= 0L ? \
1231           (charf *)&s->window[(unsigned)s->block_start] : \
1232           (charf *)Z_NULL), (long)s->strstart - s->block_start, (flush)); \
1233   s->block_start = s->strstart; \
1234   flush_pending(s->strm); \
1235   Tracev((stderr,"[FLUSH]")); \
1236}
1237
1238/* Same but force premature exit if necessary. */
1239#define FLUSH_BLOCK(s, flush) { \
1240   FLUSH_BLOCK_ONLY(s, flush); \
1241   if (s->strm->avail_out == 0) return 1; \
1242}
1243
1244/* ===========================================================================
1245 * Compress as much as possible from the input stream, return true if
1246 * processing was terminated prematurely (no more input or output space).
1247 * This function does not perform lazy evaluationof matches and inserts
1248 * new strings in the dictionary only for unmatched strings or for short
1249 * matches. It is used only for the fast compression options.
1250 */
1251local int deflate_fast(s, flush)
1252    deflate_state *s;
1253    int flush;
1254{
1255    IPos hash_head = NIL; /* head of the hash chain */
1256    int bflush;     /* set if current block must be flushed */
1257
1258    s->prev_length = MIN_MATCH-1;
1259
1260    for (;;) {
1261        /* Make sure that we always have enough lookahead, except
1262         * at the end of the input file. We need MAX_MATCH bytes
1263         * for the next match, plus MIN_MATCH bytes to insert the
1264         * string following the next match.
1265         */
1266        if (s->lookahead < MIN_LOOKAHEAD) {
1267            fill_window(s);
1268            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
1269
1270            if (s->lookahead == 0) break; /* flush the current block */
1271        }
1272
1273        /* Insert the string window[strstart .. strstart+2] in the
1274         * dictionary, and set hash_head to the head of the hash chain:
1275         */
1276        if (s->lookahead >= MIN_MATCH) {
1277            INSERT_STRING(s, s->strstart, hash_head);
1278        }
1279
1280        /* Find the longest match, discarding those <= prev_length.
1281         * At this point we have always match_length < MIN_MATCH
1282         */
1283        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
1284            /* To simplify the code, we prevent matches with the string
1285             * of window index 0 (in particular we have to avoid a match
1286             * of the string with itself at the start of the input file).
1287             */
1288            if (s->strategy != Z_HUFFMAN_ONLY) {
1289                s->match_length = longest_match (s, hash_head);
1290            }
1291            /* longest_match() sets match_start */
1292
1293            if (s->match_length > s->lookahead) s->match_length = s->lookahead;
1294        }
1295        if (s->match_length >= MIN_MATCH) {
1296            check_match(s, s->strstart, s->match_start, s->match_length);
1297
1298            bflush = ct_tally(s, s->strstart - s->match_start,
1299                              s->match_length - MIN_MATCH);
1300
1301            s->lookahead -= s->match_length;
1302
1303            /* Insert new strings in the hash table only if the match length
1304             * is not too large. This saves time but degrades compression.
1305             */
1306            if (s->match_length <= s->max_insert_length &&
1307                s->lookahead >= MIN_MATCH) {
1308                s->match_length--; /* string at strstart already in hash table */
1309                do {
1310                    s->strstart++;
1311                    INSERT_STRING(s, s->strstart, hash_head);
1312                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
1313                     * always MIN_MATCH bytes ahead.
1314                     */
1315                } while (--s->match_length != 0);
1316                s->strstart++;
1317            } else {
1318                s->strstart += s->match_length;
1319                s->match_length = 0;
1320                s->ins_h = s->window[s->strstart];
1321                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
1322#if MIN_MATCH != 3
1323                Call UPDATE_HASH() MIN_MATCH-3 more times
1324#endif
1325                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
1326                 * matter since it will be recomputed at next deflate call.
1327                 */
1328            }
1329        } else {
1330            /* No match, output a literal byte */
1331            Tracevv((stderr,"%c", s->window[s->strstart]));
1332            bflush = ct_tally (s, 0, s->window[s->strstart]);
1333            s->lookahead--;
1334            s->strstart++;
1335        }
1336        if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
1337    }
1338    FLUSH_BLOCK(s, flush);
1339    return 0; /* normal exit */
1340}
1341
1342/* ===========================================================================
1343 * Same as above, but achieves better compression. We use a lazy
1344 * evaluation for matches: a match is finally adopted only if there is
1345 * no better match at the next window position.
1346 */
1347local int deflate_slow(s, flush)
1348    deflate_state *s;
1349    int flush;
1350{
1351    IPos hash_head = NIL;    /* head of hash chain */
1352    int bflush;              /* set if current block must be flushed */
1353
1354    /* Process the input block. */
1355    for (;;) {
1356        /* Make sure that we always have enough lookahead, except
1357         * at the end of the input file. We need MAX_MATCH bytes
1358         * for the next match, plus MIN_MATCH bytes to insert the
1359         * string following the next match.
1360         */
1361        if (s->lookahead < MIN_LOOKAHEAD) {
1362            fill_window(s);
1363            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) return 1;
1364
1365            if (s->lookahead == 0) break; /* flush the current block */
1366        }
1367
1368        /* Insert the string window[strstart .. strstart+2] in the
1369         * dictionary, and set hash_head to the head of the hash chain:
1370         */
1371        if (s->lookahead >= MIN_MATCH) {
1372            INSERT_STRING(s, s->strstart, hash_head);
1373        }
1374
1375        /* Find the longest match, discarding those <= prev_length.
1376         */
1377        s->prev_length = s->match_length, s->prev_match = s->match_start;
1378        s->match_length = MIN_MATCH-1;
1379
1380        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
1381            s->strstart - hash_head <= MAX_DIST(s)) {
1382            /* To simplify the code, we prevent matches with the string
1383             * of window index 0 (in particular we have to avoid a match
1384             * of the string with itself at the start of the input file).
1385             */
1386            if (s->strategy != Z_HUFFMAN_ONLY) {
1387                s->match_length = longest_match (s, hash_head);
1388            }
1389            /* longest_match() sets match_start */
1390            if (s->match_length > s->lookahead) s->match_length = s->lookahead;
1391
1392            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
1393                 (s->match_length == MIN_MATCH &&
1394                  s->strstart - s->match_start > TOO_FAR))) {
1395
1396                /* If prev_match is also MIN_MATCH, match_start is garbage
1397                 * but we will ignore the current match anyway.
1398                 */
1399                s->match_length = MIN_MATCH-1;
1400            }
1401        }
1402        /* If there was a match at the previous step and the current
1403         * match is not better, output the previous match:
1404         */
1405        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
1406            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
1407            /* Do not insert strings in hash table beyond this. */
1408
1409            check_match(s, s->strstart-1, s->prev_match, s->prev_length);
1410
1411            bflush = ct_tally(s, s->strstart -1 - s->prev_match,
1412                              s->prev_length - MIN_MATCH);
1413
1414            /* Insert in hash table all strings up to the end of the match.
1415             * strstart-1 and strstart are already inserted. If there is not
1416             * enough lookahead, the last two strings are not inserted in
1417             * the hash table.
1418             */
1419            s->lookahead -= s->prev_length-1;
1420            s->prev_length -= 2;
1421            do {
1422                if (++s->strstart <= max_insert) {
1423                    INSERT_STRING(s, s->strstart, hash_head);
1424                }
1425            } while (--s->prev_length != 0);
1426            s->match_available = 0;
1427            s->match_length = MIN_MATCH-1;
1428            s->strstart++;
1429
1430            if (bflush) FLUSH_BLOCK(s, Z_NO_FLUSH);
1431
1432        } else if (s->match_available) {
1433            /* If there was no match at the previous position, output a
1434             * single literal. If there was a match but the current match
1435             * is longer, truncate the previous match to a single literal.
1436             */
1437            Tracevv((stderr,"%c", s->window[s->strstart-1]));
1438            if (ct_tally (s, 0, s->window[s->strstart-1])) {
1439                FLUSH_BLOCK_ONLY(s, Z_NO_FLUSH);
1440            }
1441            s->strstart++;
1442            s->lookahead--;
1443            if (s->strm->avail_out == 0) return 1;
1444        } else {
1445            /* There is no previous match to compare with, wait for
1446             * the next step to decide.
1447             */
1448            s->match_available = 1;
1449            s->strstart++;
1450            s->lookahead--;
1451        }
1452    }
1453    Assert (flush != Z_NO_FLUSH, "no flush?");
1454    if (s->match_available) {
1455        Tracevv((stderr,"%c", s->window[s->strstart-1]));
1456        ct_tally (s, 0, s->window[s->strstart-1]);
1457        s->match_available = 0;
1458    }
1459    FLUSH_BLOCK(s, flush);
1460    return 0;
1461}
1462
1463
1464/*+++++*/
1465/* trees.c -- output deflated data using Huffman coding
1466 * Copyright (C) 1995 Jean-loup Gailly
1467 * For conditions of distribution and use, see copyright notice in zlib.h
1468 */
1469
1470/*
1471 *  ALGORITHM
1472 *
1473 *      The "deflation" process uses several Huffman trees. The more
1474 *      common source values are represented by shorter bit sequences.
1475 *
1476 *      Each code tree is stored in a compressed form which is itself
1477 * a Huffman encoding of the lengths of all the code strings (in
1478 * ascending order by source values).  The actual code strings are
1479 * reconstructed from the lengths in the inflate process, as described
1480 * in the deflate specification.
1481 *
1482 *  REFERENCES
1483 *
1484 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
1485 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
1486 *
1487 *      Storer, James A.
1488 *          Data Compression:  Methods and Theory, pp. 49-50.
1489 *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
1490 *
1491 *      Sedgewick, R.
1492 *          Algorithms, p290.
1493 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
1494 */
1495
1496/* From: trees.c,v 1.5 1995/05/03 17:27:12 jloup Exp */
1497
1498#ifdef DEBUG_ZLIB
1499#  include <ctype.h>
1500#endif
1501
1502/* ===========================================================================
1503 * Constants
1504 */
1505
1506#define MAX_BL_BITS 7
1507/* Bit length codes must not exceed MAX_BL_BITS bits */
1508
1509#define END_BLOCK 256
1510/* end of block literal code */
1511
1512#define REP_3_6      16
1513/* repeat previous bit length 3-6 times (2 bits of repeat count) */
1514
1515#define REPZ_3_10    17
1516/* repeat a zero length 3-10 times  (3 bits of repeat count) */
1517
1518#define REPZ_11_138  18
1519/* repeat a zero length 11-138 times  (7 bits of repeat count) */
1520
1521local int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
1522   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};
1523
1524local int extra_dbits[D_CODES] /* extra bits for each distance code */
1525   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};
1526
1527local int extra_blbits[BL_CODES]/* extra bits for each bit length code */
1528   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};
1529
1530local uch bl_order[BL_CODES]
1531   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
1532/* The lengths of the bit length codes are sent in order of decreasing
1533 * probability, to avoid transmitting the lengths for unused bit length codes.
1534 */
1535
1536#define Buf_size (8 * 2*sizeof(char))
1537/* Number of bits used within bi_buf. (bi_buf might be implemented on
1538 * more than 16 bits on some systems.)
1539 */
1540
1541/* ===========================================================================
1542 * Local data. These are initialized only once.
1543 * To do: initialize at compile time to be completely reentrant. ???
1544 */
1545
1546local ct_data static_ltree[L_CODES+2];
1547/* The static literal tree. Since the bit lengths are imposed, there is no
1548 * need for the L_CODES extra codes used during heap construction. However,
1549 * the codes 286 and 287 are needed to build a canonical tree (see ct_init
1550 * below).
1551 */
1552
1553local ct_data static_dtree[D_CODES];
1554/* The static distance tree. (Actually a trivial tree since all codes use
1555 * 5 bits.)
1556 */
1557
1558local uch dist_code[512];
1559/* distance codes. The first 256 values correspond to the distances
1560 * 3 .. 258, the last 256 values correspond to the top 8 bits of
1561 * the 15 bit distances.
1562 */
1563
1564local uch length_code[MAX_MATCH-MIN_MATCH+1];
1565/* length code for each normalized match length (0 == MIN_MATCH) */
1566
1567local int base_length[LENGTH_CODES];
1568/* First normalized length for each code (0 = MIN_MATCH) */
1569
1570local int base_dist[D_CODES];
1571/* First normalized distance for each code (0 = distance of 1) */
1572
1573struct static_tree_desc_s {
1574    ct_data *static_tree;        /* static tree or NULL */
1575    intf    *extra_bits;         /* extra bits for each code or NULL */
1576    int     extra_base;          /* base index for extra_bits */
1577    int     elems;               /* max number of elements in the tree */
1578    int     max_length;          /* max bit length for the codes */
1579};
1580
1581local static_tree_desc  static_l_desc =
1582{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
1583
1584local static_tree_desc  static_d_desc =
1585{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};
1586
1587local static_tree_desc  static_bl_desc =
1588{(ct_data *)0, extra_blbits, 0,      BL_CODES, MAX_BL_BITS};
1589
1590/* ===========================================================================
1591 * Local (static) routines in this file.
1592 */
1593
1594local void ct_static_init OF((void));
1595local void init_block     OF((deflate_state *s));
1596local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
1597local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
1598local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
1599local void build_tree     OF((deflate_state *s, tree_desc *desc));
1600local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
1601local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
1602local int  build_bl_tree  OF((deflate_state *s));
1603local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
1604                              int blcodes));
1605local void compress_block OF((deflate_state *s, ct_data *ltree,
1606                              ct_data *dtree));
1607local void set_data_type  OF((deflate_state *s));
1608local unsigned bi_reverse OF((unsigned value, int length));
1609local void bi_windup      OF((deflate_state *s));
1610local void bi_flush       OF((deflate_state *s));
1611local void copy_block     OF((deflate_state *s, charf *buf, unsigned len,
1612                              int header));
1613
1614#ifndef DEBUG_ZLIB
1615#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
1616   /* Send a code of the given tree. c and tree must not have side effects */
1617
1618#else /* DEBUG_ZLIB */
1619#  define send_code(s, c, tree) \
1620     { if (verbose>1) fprintf(stderr,"\ncd %3d ",(c)); \
1621       send_bits(s, tree[c].Code, tree[c].Len); }
1622#endif
1623
1624#define d_code(dist) \
1625   ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
1626/* Mapping from a distance to a distance code. dist is the distance - 1 and
1627 * must not have side effects. dist_code[256] and dist_code[257] are never
1628 * used.
1629 */
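
/* Example: for a match at distance 300, dist is 299; since 299 >= 256,
 * d_code(299) is dist_code[256 + (299>>7)] = dist_code[258] = 16.
 * base_dist[16] is 256 and extra_dbits[16] is 7, so the distance is sent
 * as distance code 16 followed by 7 extra bits holding 299 - 256 = 43.
 */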
1630
1631/* ===========================================================================
1632 * Output a short LSB first on the stream.
1633 * IN assertion: there is enough room in pendingBuf.
1634 */
1635#define put_short(s, w) { \
1636    put_byte(s, (uch)((w) & 0xff)); \
1637    put_byte(s, (uch)((ush)(w) >> 8)); \
1638}
1639
1640/* ===========================================================================
1641 * Send a value on a given number of bits.
1642 * IN assertion: length <= 16 and value fits in length bits.
1643 */
1644#ifdef DEBUG_ZLIB
1645local void send_bits      OF((deflate_state *s, int value, int length));
1646
1647local void send_bits(s, value, length)
1648    deflate_state *s;
1649    int value;  /* value to send */
1650    int length; /* number of bits */
1651{
1652    Tracev((stderr," l %2d v %4x ", length, value));
1653    Assert(length > 0 && length <= 15, "invalid length");
1654    s->bits_sent += (ulg)length;
1655
1656    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
1657     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
1658     * unused bits in value.
1659     */
1660    if (s->bi_valid > (int)Buf_size - length) {
1661        s->bi_buf |= (value << s->bi_valid);
1662        put_short(s, s->bi_buf);
1663        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
1664        s->bi_valid += length - Buf_size;
1665    } else {
1666        s->bi_buf |= value << s->bi_valid;
1667        s->bi_valid += length;
1668    }
1669}
1670#else /* !DEBUG_ZLIB */
1671
1672#define send_bits(s, value, length) \
1673{ int len = length;\
1674  if (s->bi_valid > (int)Buf_size - len) {\
1675    int val = value;\
1676    s->bi_buf |= (val << s->bi_valid);\
1677    put_short(s, s->bi_buf);\
1678    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
1679    s->bi_valid += len - Buf_size;\
1680  } else {\
1681    s->bi_buf |= (value) << s->bi_valid;\
1682    s->bi_valid += len;\
1683  }\
1684}
1685#endif /* DEBUG_ZLIB */
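
/* Example: with Buf_size == 16, suppose bi_valid is 13 and a 6-bit value v
 * is sent.  Since 13 > 16 - 6, the 3 low bits of v are appended to bi_buf,
 * the filled 16-bit buffer is written with put_short(), and the 3 high bits
 * of v are left in bi_buf with bi_valid reset to 13 + 6 - 16 = 3.
 */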
1686
1687
1688#define MAX(a,b) (a >= b ? a : b)
1689/* the arguments must not have side effects */
1690
1691/* ===========================================================================
1692 * Initialize the various 'constant' tables.
1693 * To do: do this at compile time.
1694 */
1695local void ct_static_init()
1696{
1697    int n;        /* iterates over tree elements */
1698    int bits;     /* bit counter */
1699    int length;   /* length value */
1700    int code;     /* code value */
1701    int dist;     /* distance index */
1702    ush bl_count[MAX_BITS+1];
1703    /* number of codes at each bit length for an optimal tree */
1704
1705    /* Initialize the mapping length (0..255) -> length code (0..28) */
1706    length = 0;
1707    for (code = 0; code < LENGTH_CODES-1; code++) {
1708        base_length[code] = length;
1709        for (n = 0; n < (1<<extra_lbits[code]); n++) {
1710            length_code[length++] = (uch)code;
1711        }
1712    }
1713    Assert (length == 256, "ct_static_init: length != 256");
1714    /* Note that the length 255 (match length 258) can be represented
1715     * in two different ways: code 284 + 5 bits or code 285, so we
1716     * overwrite length_code[255] to use the best encoding:
1717     */
1718    length_code[length-1] = (uch)code;
1719
1720    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
1721    dist = 0;
1722    for (code = 0 ; code < 16; code++) {
1723        base_dist[code] = dist;
1724        for (n = 0; n < (1<<extra_dbits[code]); n++) {
1725            dist_code[dist++] = (uch)code;
1726        }
1727    }
1728    Assert (dist == 256, "ct_static_init: dist != 256");
1729    dist >>= 7; /* from now on, all distances are divided by 128 */
1730    for ( ; code < D_CODES; code++) {
1731        base_dist[code] = dist << 7;
1732        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
1733            dist_code[256 + dist++] = (uch)code;
1734        }
1735    }
1736    Assert (dist == 256, "ct_static_init: 256+dist != 512");
1737
1738    /* Construct the codes of the static literal tree */
1739    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
1740    n = 0;
1741    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
1742    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
1743    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
1744    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
1745    /* Codes 286 and 287 do not exist, but we must include them in the
1746     * tree construction to get a canonical Huffman tree (longest code
1747     * all ones)
1748     */
1749    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);
1750
1751    /* The static distance tree is trivial: */
1752    for (n = 0; n < D_CODES; n++) {
1753        static_dtree[n].Len = 5;
1754        static_dtree[n].Code = bi_reverse(n, 5);
1755    }
1756}
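
/* Example of the tables built above: a match of length 18 is passed to
 * ct_tally() as lc = 18 - MIN_MATCH = 15.  length_code[15] is 11,
 * base_length[11] is 14 and extra_lbits[11] is 1, so compress_block() emits
 * literal/length code 11 + LITERALS + 1 = 268 followed by 1 extra bit
 * holding 15 - 14 = 1, which is the deflate encoding of length 18.
 */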
1757
1758/* ===========================================================================
1759 * Initialize the tree data structures for a new zlib stream.
1760 */
1761local void ct_init(s)
1762    deflate_state *s;
1763{
1764    if (static_dtree[0].Len == 0) {
1765        ct_static_init();              /* To do: at compile time */
1766    }
1767
1768    s->compressed_len = 0L;
1769
1770    s->l_desc.dyn_tree = s->dyn_ltree;
1771    s->l_desc.stat_desc = &static_l_desc;
1772
1773    s->d_desc.dyn_tree = s->dyn_dtree;
1774    s->d_desc.stat_desc = &static_d_desc;
1775
1776    s->bl_desc.dyn_tree = s->bl_tree;
1777    s->bl_desc.stat_desc = &static_bl_desc;
1778
1779    s->bi_buf = 0;
1780    s->bi_valid = 0;
1781    s->last_eob_len = 8; /* enough lookahead for inflate */
1782#ifdef DEBUG_ZLIB
1783    s->bits_sent = 0L;
1784#endif
1785    s->blocks_in_packet = 0;
1786
1787    /* Initialize the first block of the first file: */
1788    init_block(s);
1789}
1790
1791/* ===========================================================================
1792 * Initialize a new block.
1793 */
1794local void init_block(s)
1795    deflate_state *s;
1796{
1797    int n; /* iterates over tree elements */
1798
1799    /* Initialize the trees. */
1800    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
1801    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
1802    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
1803
1804    s->dyn_ltree[END_BLOCK].Freq = 1;
1805    s->opt_len = s->static_len = 0L;
1806    s->last_lit = s->matches = 0;
1807}
1808
1809#define SMALLEST 1
1810/* Index within the heap array of least frequent node in the Huffman tree */
1811
1812
1813/* ===========================================================================
1814 * Remove the smallest element from the heap and recreate the heap with
1815 * one less element. Updates heap and heap_len.
1816 */
1817#define pqremove(s, tree, top) \
1818{\
1819    top = s->heap[SMALLEST]; \
1820    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
1821    pqdownheap(s, tree, SMALLEST); \
1822}
1823
1824/* ===========================================================================
1825 * Compares two subtrees, using the tree depth as tie breaker when
1826 * the subtrees have equal frequency. This minimizes the worst case length.
1827 */
1828#define smaller(tree, n, m, depth) \
1829   (tree[n].Freq < tree[m].Freq || \
1830   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
1831
1832/* ===========================================================================
1833 * Restore the heap property by moving down the tree starting at node k,
1834 * exchanging a node with the smallest of its two sons if necessary, stopping
1835 * when the heap property is re-established (each father smaller than its
1836 * two sons).
1837 */
1838local void pqdownheap(s, tree, k)
1839    deflate_state *s;
1840    ct_data *tree;  /* the tree to restore */
1841    int k;               /* node to move down */
1842{
1843    int v = s->heap[k];
1844    int j = k << 1;  /* left son of k */
1845    while (j <= s->heap_len) {
1846        /* Set j to the smallest of the two sons: */
1847        if (j < s->heap_len &&
1848            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
1849            j++;
1850        }
1851        /* Exit if v is smaller than both sons */
1852        if (smaller(tree, v, s->heap[j], s->depth)) break;
1853
1854        /* Exchange v with the smallest son */
1855        s->heap[k] = s->heap[j];  k = j;
1856
1857        /* And continue down the tree, setting j to the left son of k */
1858        j <<= 1;
1859    }
1860    s->heap[k] = v;
1861}
1862
1863/* ===========================================================================
1864 * Compute the optimal bit lengths for a tree and update the total bit length
1865 * for the current block.
1866 * IN assertion: the fields freq and dad are set, heap[heap_max] and
1867 *    above are the tree nodes sorted by increasing frequency.
1868 * OUT assertions: the field len is set to the optimal bit length, the
1869 *     array bl_count contains the frequencies for each bit length.
1870 *     The length opt_len is updated; static_len is also updated if stree is
1871 *     not null.
1872 */
1873local void gen_bitlen(s, desc)
1874    deflate_state *s;
1875    tree_desc *desc;    /* the tree descriptor */
1876{
1877    ct_data *tree  = desc->dyn_tree;
1878    int max_code   = desc->max_code;
1879    ct_data *stree = desc->stat_desc->static_tree;
1880    intf *extra    = desc->stat_desc->extra_bits;
1881    int base       = desc->stat_desc->extra_base;
1882    int max_length = desc->stat_desc->max_length;
1883    int h;              /* heap index */
1884    int n, m;           /* iterate over the tree elements */
1885    int bits;           /* bit length */
1886    int xbits;          /* extra bits */
1887    ush f;              /* frequency */
1888    int overflow = 0;   /* number of elements with bit length too large */
1889
1890    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;
1891
1892    /* In a first pass, compute the optimal bit lengths (which may
1893     * overflow in the case of the bit length tree).
1894     */
1895    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */
1896
1897    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
1898        n = s->heap[h];
1899        bits = tree[tree[n].Dad].Len + 1;
1900        if (bits > max_length) bits = max_length, overflow++;
1901        tree[n].Len = (ush)bits;
1902        /* We overwrite tree[n].Dad which is no longer needed */
1903
1904        if (n > max_code) continue; /* not a leaf node */
1905
1906        s->bl_count[bits]++;
1907        xbits = 0;
1908        if (n >= base) xbits = extra[n-base];
1909        f = tree[n].Freq;
1910        s->opt_len += (ulg)f * (bits + xbits);
1911        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
1912    }
1913    if (overflow == 0) return;
1914
1915    Trace((stderr,"\nbit length overflow\n"));
1916    /* This happens for example on obj2 and pic of the Calgary corpus */
1917
1918    /* Find the first bit length which could increase: */
1919    do {
1920        bits = max_length-1;
1921        while (s->bl_count[bits] == 0) bits--;
1922        s->bl_count[bits]--;      /* move one leaf down the tree */
1923        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
1924        s->bl_count[max_length]--;
1925        /* The brother of the overflow item also moves one step up,
1926         * but this does not affect bl_count[max_length]
1927         */
1928        overflow -= 2;
1929    } while (overflow > 0);
1930
1931    /* Now recompute all bit lengths, scanning in increasing frequency.
1932     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
1933     * lengths instead of fixing only the wrong ones. This idea is taken
1934     * from 'ar' written by Haruhiko Okumura.)
1935     */
1936    for (bits = max_length; bits != 0; bits--) {
1937        n = s->bl_count[bits];
1938        while (n != 0) {
1939            m = s->heap[--h];
1940            if (m > max_code) continue;
1941            if (tree[m].Len != (unsigned) bits) {
1942                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
1943                s->opt_len += ((long)bits - (long)tree[m].Len)
1944                              *(long)tree[m].Freq;
1945                tree[m].Len = (ush)bits;
1946            }
1947            n--;
1948        }
1949    }
1950}
1951
1952/* ===========================================================================
1953 * Generate the codes for a given tree and bit counts (which need not be
1954 * optimal).
1955 * IN assertion: the array bl_count contains the bit length statistics for
1956 * the given tree and the field len is set for all tree elements.
1957 * OUT assertion: the field code is set for all tree elements of non
1958 *     zero code length.
1959 */
1960local void gen_codes (tree, max_code, bl_count)
1961    ct_data *tree;             /* the tree to decorate */
1962    int max_code;              /* largest code with non zero frequency */
1963    ushf *bl_count;            /* number of codes at each bit length */
1964{
1965    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
1966    ush code = 0;              /* running code value */
1967    int bits;                  /* bit index */
1968    int n;                     /* code index */
1969
1970    /* The distribution counts are first used to generate the code values
1971     * without bit reversal.
1972     */
1973    for (bits = 1; bits <= MAX_BITS; bits++) {
1974        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
1975    }
1976    /* Check that the bit counts in bl_count are consistent. The last code
1977     * must be all ones.
1978     */
1979    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
1980            "inconsistent bit counts");
1981    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
1982
1983    for (n = 0;  n <= max_code; n++) {
1984        int len = tree[n].Len;
1985        if (len == 0) continue;
1986        /* Now reverse the bits */
1987        tree[n].Code = bi_reverse(next_code[len]++, len);
1988
1989        Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
1990             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
1991    }
1992}
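
/* Example: for four codes with bit lengths 1, 2, 3 and 3, bl_count is
 * {0,1,1,2} and the loop above yields next_code[1] = 0, next_code[2] = 2,
 * next_code[3] = 6, i.e. the canonical codes 0, 10, 110 and 111 (shown
 * before bit reversal).  bi_reverse() then stores them LSB first so that
 * send_bits() outputs them in the order required by the deflate format.
 */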
1993
1994/* ===========================================================================
1995 * Construct one Huffman tree and assign the code bit strings and lengths.
1996 * Update the total bit length for the current block.
1997 * IN assertion: the field freq is set for all tree elements.
1998 * OUT assertions: the fields len and code are set to the optimal bit length
1999 *     and corresponding code. The length opt_len is updated; static_len is
2000 *     also updated if stree is not null. The field max_code is set.
2001 */
2002local void build_tree(s, desc)
2003    deflate_state *s;
2004    tree_desc *desc; /* the tree descriptor */
2005{
2006    ct_data *tree   = desc->dyn_tree;
2007    ct_data *stree  = desc->stat_desc->static_tree;
2008    int elems       = desc->stat_desc->elems;
2009    int n, m;          /* iterate over heap elements */
2010    int max_code = -1; /* largest code with non zero frequency */
2011    int node;          /* new node being created */
2012
2013    /* Construct the initial heap, with least frequent element in
2014     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
2015     * heap[0] is not used.
2016     */
2017    s->heap_len = 0, s->heap_max = HEAP_SIZE;
2018
2019    for (n = 0; n < elems; n++) {
2020        if (tree[n].Freq != 0) {
2021            s->heap[++(s->heap_len)] = max_code = n;
2022            s->depth[n] = 0;
2023        } else {
2024            tree[n].Len = 0;
2025        }
2026    }
2027
2028    /* The pkzip format requires that at least one distance code exists,
2029     * and that at least one bit should be sent even if there is only one
2030     * possible code. So to avoid special checks later on we force at least
2031     * two codes of non zero frequency.
2032     */
2033    while (s->heap_len < 2) {
2034        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
2035        tree[node].Freq = 1;
2036        s->depth[node] = 0;
2037        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
2038        /* node is 0 or 1 so it does not have extra bits */
2039    }
2040    desc->max_code = max_code;
2041
2042    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
2043     * establish sub-heaps of increasing lengths:
2044     */
2045    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);
2046
2047    /* Construct the Huffman tree by repeatedly combining the least two
2048     * frequent nodes.
2049     */
2050    node = elems;              /* next internal node of the tree */
2051    do {
2052        pqremove(s, tree, n);  /* n = node of least frequency */
2053        m = s->heap[SMALLEST]; /* m = node of next least frequency */
2054
2055        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
2056        s->heap[--(s->heap_max)] = m;
2057
2058        /* Create a new node father of n and m */
2059        tree[node].Freq = tree[n].Freq + tree[m].Freq;
2060        s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
2061        tree[n].Dad = tree[m].Dad = (ush)node;
2062#ifdef DUMP_BL_TREE
2063        if (tree == s->bl_tree) {
2064            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
2065                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
2066        }
2067#endif
2068        /* and insert the new node in the heap */
2069        s->heap[SMALLEST] = node++;
2070        pqdownheap(s, tree, SMALLEST);
2071
2072    } while (s->heap_len >= 2);
2073
2074    s->heap[--(s->heap_max)] = s->heap[SMALLEST];
2075
2076    /* At this point, the fields freq and dad are set. We can now
2077     * generate the bit lengths.
2078     */
2079    gen_bitlen(s, (tree_desc *)desc);
2080
2081    /* The field len is now set, we can generate the bit codes */
2082    gen_codes ((ct_data *)tree, max_code, s->bl_count);
2083}
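
/* Example: for frequencies A:5, B:2, C:1, D:1 the loop above first combines
 * C and D (frequency 2), then that node with B (frequency 4), then the
 * result with A (frequency 9).  gen_bitlen() then assigns the bit lengths
 * 1, 2, 3, 3 and gen_codes() the corresponding canonical codes.
 */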
2084
2085/* ===========================================================================
2086 * Scan a literal or distance tree to determine the frequencies of the codes
2087 * in the bit length tree.
2088 */
2089local void scan_tree (s, tree, max_code)
2090    deflate_state *s;
2091    ct_data *tree;   /* the tree to be scanned */
2092    int max_code;    /* and its largest code of non zero frequency */
2093{
2094    int n;                     /* iterates over all tree elements */
2095    int prevlen = -1;          /* last emitted length */
2096    int curlen;                /* length of current code */
2097    int nextlen = tree[0].Len; /* length of next code */
2098    int count = 0;             /* repeat count of the current code */
2099    int max_count = 7;         /* max repeat count */
2100    int min_count = 4;         /* min repeat count */
2101
2102    if (nextlen == 0) max_count = 138, min_count = 3;
2103    tree[max_code+1].Len = (ush)0xffff; /* guard */
2104
2105    for (n = 0; n <= max_code; n++) {
2106        curlen = nextlen; nextlen = tree[n+1].Len;
2107        if (++count < max_count && curlen == nextlen) {
2108            continue;
2109        } else if (count < min_count) {
2110            s->bl_tree[curlen].Freq += count;
2111        } else if (curlen != 0) {
2112            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
2113            s->bl_tree[REP_3_6].Freq++;
2114        } else if (count <= 10) {
2115            s->bl_tree[REPZ_3_10].Freq++;
2116        } else {
2117            s->bl_tree[REPZ_11_138].Freq++;
2118        }
2119        count = 0; prevlen = curlen;
2120        if (nextlen == 0) {
2121            max_count = 138, min_count = 3;
2122        } else if (curlen == nextlen) {
2123            max_count = 6, min_count = 3;
2124        } else {
2125            max_count = 7, min_count = 4;
2126        }
2127    }
2128}
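
/* Example: a run of seven codes of length 4 is counted as one length-4
 * code plus one REP_3_6 (repeat 6); a following run of twelve zero lengths
 * is counted as one REPZ_11_138 (repeat 12).  send_tree() below emits
 * exactly these codes, with the repeat counts in the extra bits.
 */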
2129
2130/* ===========================================================================
2131 * Send a literal or distance tree in compressed form, using the codes in
2132 * bl_tree.
2133 */
2134local void send_tree (s, tree, max_code)
2135    deflate_state *s;
2136    ct_data *tree; /* the tree to be scanned */
2137    int max_code;       /* and its largest code of non zero frequency */
2138{
2139    int n;                     /* iterates over all tree elements */
2140    int prevlen = -1;          /* last emitted length */
2141    int curlen;                /* length of current code */
2142    int nextlen = tree[0].Len; /* length of next code */
2143    int count = 0;             /* repeat count of the current code */
2144    int max_count = 7;         /* max repeat count */
2145    int min_count = 4;         /* min repeat count */
2146
2147    /* tree[max_code+1].Len = -1; */  /* guard already set */
2148    if (nextlen == 0) max_count = 138, min_count = 3;
2149
2150    for (n = 0; n <= max_code; n++) {
2151        curlen = nextlen; nextlen = tree[n+1].Len;
2152        if (++count < max_count && curlen == nextlen) {
2153            continue;
2154        } else if (count < min_count) {
2155            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);
2156
2157        } else if (curlen != 0) {
2158            if (curlen != prevlen) {
2159                send_code(s, curlen, s->bl_tree); count--;
2160            }
2161            Assert(count >= 3 && count <= 6, " 3_6?");
2162            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);
2163
2164        } else if (count <= 10) {
2165            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);
2166
2167        } else {
2168            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
2169        }
2170        count = 0; prevlen = curlen;
2171        if (nextlen == 0) {
2172            max_count = 138, min_count = 3;
2173        } else if (curlen == nextlen) {
2174            max_count = 6, min_count = 3;
2175        } else {
2176            max_count = 7, min_count = 4;
2177        }
2178    }
2179}
2180
2181/* ===========================================================================
2182 * Construct the Huffman tree for the bit lengths and return the index in
2183 * bl_order of the last bit length code to send.
2184 */
2185local int build_bl_tree(s)
2186    deflate_state *s;
2187{
2188    int max_blindex;  /* index of last bit length code of non zero freq */
2189
2190    /* Determine the bit length frequencies for literal and distance trees */
2191    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2192    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2193
2194    /* Build the bit length tree: */
2195    build_tree(s, (tree_desc *)(&(s->bl_desc)));
2196    /* opt_len now includes the length of the tree representations, except
2197     * the lengths of the bit length codes and the 5+5+4 bits for the counts.
2198     */
2199
2200    /* Determine the number of bit length codes to send. The pkzip format
2201     * requires that at least 4 bit length codes be sent. (appnote.txt says
2202     * 3 but the actual value used is 4.)
2203     */
2204    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2205        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2206    }
2207    /* Update opt_len to include the bit length tree and counts */
2208    s->opt_len += 3*(max_blindex+1) + 5+5+4;
2209    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2210            s->opt_len, s->static_len));
2211
2212    return max_blindex;
2213}
2214
2215/* ===========================================================================
2216 * Send the header for a block using dynamic Huffman trees: the counts, the
2217 * lengths of the bit length codes, the literal tree and the distance tree.
2218 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
2219 */
2220local void send_all_trees(s, lcodes, dcodes, blcodes)
2221    deflate_state *s;
2222    int lcodes, dcodes, blcodes; /* number of codes for each tree */
2223{
2224    int rank;                    /* index in bl_order */
2225
2226    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
2227    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2228            "too many codes");
2229    Tracev((stderr, "\nbl counts: "));
2230    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
2231    send_bits(s, dcodes-1,   5);
2232    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
2233    for (rank = 0; rank < blcodes; rank++) {
2234        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
2235        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
2236    }
2237    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
2238
2239    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
2240    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
2241
2242    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
2243    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
2244}
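
/* Example: with lcodes = 280, dcodes = 20 and blcodes = 17, the header
 * starts with 23, 19 and 13 sent in 5, 5 and 4 bits respectively, followed
 * by the 17 3-bit lengths of the bit length codes taken in bl_order.
 */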
2245
2246/* ===========================================================================
2247 * Send a stored block
2248 */
2249local void ct_stored_block(s, buf, stored_len, eof)
2250    deflate_state *s;
2251    charf *buf;       /* input block */
2252    ulg stored_len;   /* length of input block */
2253    int eof;          /* true if this is the last block for a file */
2254{
2255    send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
2256    s->compressed_len = (s->compressed_len + 3 + 7) & ~7L;
2257    s->compressed_len += (stored_len + 4) << 3;
2258
2259    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
2260}
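
/* Example: a stored block holding 100 bytes costs the 3-bit block type,
 * up to 7 padding bits to reach a byte boundary, 4 bytes for LEN and its
 * one's complement NLEN, and then the 100 raw bytes; this is what the
 * rounding of compressed_len and the (stored_len + 4) << 3 term above
 * account for.
 */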
2261
2262/* Send just the `stored block' type code without any length bytes or data.
2263 */
2264local void ct_stored_type_only(s)
2265    deflate_state *s;
2266{
2267    send_bits(s, (STORED_BLOCK << 1), 3);
2268    bi_windup(s);
2269    s->compressed_len = (s->compressed_len + 3) & ~7L;
2270}
2271
2272
2273/* ===========================================================================
2274 * Send one empty static block to give enough lookahead for inflate.
2275 * This takes 10 bits, of which 7 may remain in the bit buffer.
2276 * The current inflate code requires 9 bits of lookahead. If the EOB
2277 * code for the previous block was coded on 5 bits or less, inflate
2278 * may have only 5+3 bits of lookahead to decode this EOB.
2279 * (There are no problems if the previous block is stored or fixed.)
2280 */
2281local void ct_align(s)
2282    deflate_state *s;
2283{
2284    send_bits(s, STATIC_TREES<<1, 3);
2285    send_code(s, END_BLOCK, static_ltree);
2286    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
2287    bi_flush(s);
2288    /* Of the 10 bits for the empty block, we have already sent
2289     * (10 - bi_valid) bits. The lookahead for the EOB of the previous
2290     * block was thus its length plus what we have just sent.
2291     */
2292    if (s->last_eob_len + 10 - s->bi_valid < 9) {
2293        send_bits(s, STATIC_TREES<<1, 3);
2294        send_code(s, END_BLOCK, static_ltree);
2295        s->compressed_len += 10L;
2296        bi_flush(s);
2297    }
2298    s->last_eob_len = 7;
2299}
2300
2301/* ===========================================================================
2302 * Determine the best encoding for the current block: dynamic trees, static
2303 * trees or store, and output the encoded block to the zip file. This function
2304 * returns the total compressed length for the file so far.
2305 */
2306local ulg ct_flush_block(s, buf, stored_len, flush)
2307    deflate_state *s;
2308    charf *buf;       /* input block, or NULL if too old */
2309    ulg stored_len;   /* length of input block */
2310    int flush;        /* Z_FINISH if this is the last block for a file */
2311{
2312    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
2313    int max_blindex;  /* index of last bit length code of non zero freq */
2314    int eof = flush == Z_FINISH;
2315
2316    ++s->blocks_in_packet;
2317
2318    /* Check if the file is ascii or binary */
2319    if (s->data_type == UNKNOWN) set_data_type(s);
2320
2321    /* Construct the literal and distance trees */
2322    build_tree(s, (tree_desc *)(&(s->l_desc)));
2323    Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
2324            s->static_len));
2325
2326    build_tree(s, (tree_desc *)(&(s->d_desc)));
2327    Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
2328            s->static_len));
2329    /* At this point, opt_len and static_len are the total bit lengths of
2330     * the compressed block data, excluding the tree representations.
2331     */
2332
2333    /* Build the bit length tree for the above two trees, and get the index
2334     * in bl_order of the last bit length code to send.
2335     */
2336    max_blindex = build_bl_tree(s);
2337
2338    /* Determine the best encoding. Compute first the block length in bytes */
2339    opt_lenb = (s->opt_len+3+7)>>3;
2340    static_lenb = (s->static_len+3+7)>>3;
2341
2342    Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
2343            opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
2344            s->last_lit));
2345
2346    if (static_lenb <= opt_lenb) opt_lenb = static_lenb;
2347
2348    /* If compression failed and this is the first and last block,
2349     * and if the .zip file supports seeking (to rewrite the local header),
2350     * the whole file is transformed into a stored file:
2351     */
2352#ifdef STORED_FILE_OK
2353#  ifdef FORCE_STORED_FILE
2354    if (eof && compressed_len == 0L) /* force stored file */
2355#  else
2356    if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable())
2357#  endif
2358    {
2359        /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
2360        if (buf == (charf*)0) error ("block vanished");
2361
2362        copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
2363        s->compressed_len = stored_len << 3;
2364        s->method = STORED;
2365    } else
2366#endif /* STORED_FILE_OK */
2367
2368    /* For Z_PACKET_FLUSH, if we don't achieve the required minimum
2369     * compression, and this block contains all the data since the last
2370     * time we used Z_PACKET_FLUSH, then just omit this block completely
2371     * from the output.
2372     */
2373    if (flush == Z_PACKET_FLUSH && s->blocks_in_packet == 1
2374	&& opt_lenb > stored_len - s->minCompr) {
2375	s->blocks_in_packet = 0;
2376	/* output nothing */
2377    } else
2378
2379#ifdef FORCE_STORED
2380    if (buf != (char*)0) /* force stored block */
2381#else
2382    if (stored_len+4 <= opt_lenb && buf != (char*)0)
2383                       /* 4: two words for the lengths */
2384#endif
2385    {
2386        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
2387         * Otherwise we can't have processed more than WSIZE input bytes since
2388         * the last block flush, because compression would have been
2389         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
2390         * transform a block into a stored block.
2391         */
2392        ct_stored_block(s, buf, stored_len, eof);
2393    } else
2394
2395#ifdef FORCE_STATIC
2396    if (static_lenb >= 0) /* force static trees */
2397#else
2398    if (static_lenb == opt_lenb)
2399#endif
2400    {
2401        send_bits(s, (STATIC_TREES<<1)+eof, 3);
2402        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
2403        s->compressed_len += 3 + s->static_len;
2404    } else {
2405        send_bits(s, (DYN_TREES<<1)+eof, 3);
2406        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
2407                       max_blindex+1);
2408        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
2409        s->compressed_len += 3 + s->opt_len;
2410    }
2411    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
2412    init_block(s);
2413
2414    if (eof) {
2415        bi_windup(s);
2416        s->compressed_len += 7;  /* align on byte boundary */
2417    }
2418    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
2419           s->compressed_len-7*eof));
2420
2421    return s->compressed_len >> 3;
2422}
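
/* Example of the choice above (leaving aside the Z_PACKET_FLUSH case): if
 * the dynamic trees would need 120 bytes, the static trees 100 bytes and
 * the raw data is 90 bytes, opt_lenb is first lowered to 100 and then
 * 90 + 4 <= 100 selects a stored block.  If the raw data were 120 bytes
 * instead, static_lenb == opt_lenb would select the static trees.
 */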
2423
2424/* ===========================================================================
2425 * Save the match info and tally the frequency counts. Return true if
2426 * the current block must be flushed.
2427 */
2428local int ct_tally (s, dist, lc)
2429    deflate_state *s;
2430    int dist;  /* distance of matched string */
2431    int lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
2432{
2433    s->d_buf[s->last_lit] = (ush)dist;
2434    s->l_buf[s->last_lit++] = (uch)lc;
2435    if (dist == 0) {
2436        /* lc is the unmatched char */
2437        s->dyn_ltree[lc].Freq++;
2438    } else {
2439        s->matches++;
2440        /* Here, lc is the match length - MIN_MATCH */
2441        dist--;             /* dist = match distance - 1 */
2442        Assert((ush)dist < (ush)MAX_DIST(s) &&
2443               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
2444               (ush)d_code(dist) < (ush)D_CODES,  "ct_tally: bad match");
2445
2446        s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
2447        s->dyn_dtree[d_code(dist)].Freq++;
2448    }
2449
2450    /* Try to guess if it is profitable to stop the current block here */
2451    if (s->level > 2 && (s->last_lit & 0xfff) == 0) {
2452        /* Compute an upper bound for the compressed length */
2453        ulg out_length = (ulg)s->last_lit*8L;
2454        ulg in_length = (ulg)s->strstart - s->block_start;
2455        int dcode;
2456        for (dcode = 0; dcode < D_CODES; dcode++) {
2457            out_length += (ulg)s->dyn_dtree[dcode].Freq *
2458                (5L+extra_dbits[dcode]);
2459        }
2460        out_length >>= 3;
2461        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
2462               s->last_lit, in_length, out_length,
2463               100L - out_length*100L/in_length));
2464        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
2465    }
2466    return (s->last_lit == s->lit_bufsize-1);
2467    /* We avoid equality with lit_bufsize because of wraparound at 64K
2468     * on 16 bit machines and because stored blocks are restricted to
2469     * 64K-1 bytes.
2470     */
2471}
2472
2473/* ===========================================================================
2474 * Send the block data compressed using the given Huffman trees
2475 */
2476local void compress_block(s, ltree, dtree)
2477    deflate_state *s;
2478    ct_data *ltree; /* literal tree */
2479    ct_data *dtree; /* distance tree */
2480{
2481    unsigned dist;      /* distance of matched string */
2482    int lc;             /* match length or unmatched char (if dist == 0) */
2483    unsigned lx = 0;    /* running index in l_buf */
2484    unsigned code;      /* the code to send */
2485    int extra;          /* number of extra bits to send */
2486
2487    if (s->last_lit != 0) do {
2488        dist = s->d_buf[lx];
2489        lc = s->l_buf[lx++];
2490        if (dist == 0) {
2491            send_code(s, lc, ltree); /* send a literal byte */
2492            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
2493        } else {
2494            /* Here, lc is the match length - MIN_MATCH */
2495            code = length_code[lc];
2496            send_code(s, code+LITERALS+1, ltree); /* send the length code */
2497            extra = extra_lbits[code];
2498            if (extra != 0) {
2499                lc -= base_length[code];
2500                send_bits(s, lc, extra);       /* send the extra length bits */
2501            }
2502            dist--; /* dist is now the match distance - 1 */
2503            code = d_code(dist);
2504            Assert (code < D_CODES, "bad d_code");
2505
2506            send_code(s, code, dtree);       /* send the distance code */
2507            extra = extra_dbits[code];
2508            if (extra != 0) {
2509                dist -= base_dist[code];
2510                send_bits(s, dist, extra);   /* send the extra distance bits */
2511            }
2512        } /* literal or match pair ? */
2513
2514        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
2515        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");
2516
2517    } while (lx < s->last_lit);
2518
2519    send_code(s, END_BLOCK, ltree);
2520    s->last_eob_len = ltree[END_BLOCK].Len;
2521}
2522
2523/* ===========================================================================
2524 * Set the data type to ASCII or Z_BINARY, using a crude approximation:
2525 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
2526 * IN assertion: the fields freq of dyn_ltree are set and the total of all
2527 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
2528 */
2529local void set_data_type(s)
2530    deflate_state *s;
2531{
2532    int n = 0;
2533    unsigned ascii_freq = 0;
2534    unsigned bin_freq = 0;
2535    while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
2536    while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
2537    while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
2538    s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : ASCII);
2539}
2540
2541/* ===========================================================================
2542 * Reverse the first len bits of a code, using straightforward code (a faster
2543 * method would use a table)
2544 * IN assertion: 1 <= len <= 15
2545 */
2546local unsigned bi_reverse(code, len)
2547    unsigned code; /* the value to invert */
2548    int len;       /* its bit length */
2549{
2550    register unsigned res = 0;
2551    do {
2552        res |= code & 1;
2553        code >>= 1, res <<= 1;
2554    } while (--len > 0);
2555    return res >> 1;
2556}
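
/* Example: bi_reverse(0x06, 5) reverses the 5-bit string 00110 into 01100
 * and returns 0x0c.
 */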
2557
2558/* ===========================================================================
2559 * Flush the bit buffer, keeping at most 7 bits in it.
2560 */
2561local void bi_flush(s)
2562    deflate_state *s;
2563{
2564    if (s->bi_valid == 16) {
2565        put_short(s, s->bi_buf);
2566        s->bi_buf = 0;
2567        s->bi_valid = 0;
2568    } else if (s->bi_valid >= 8) {
2569        put_byte(s, (Byte)s->bi_buf);
2570        s->bi_buf >>= 8;
2571        s->bi_valid -= 8;
2572    }
2573}
2574
2575/* ===========================================================================
2576 * Flush the bit buffer and align the output on a byte boundary
2577 */
2578local void bi_windup(s)
2579    deflate_state *s;
2580{
2581    if (s->bi_valid > 8) {
2582        put_short(s, s->bi_buf);
2583    } else if (s->bi_valid > 0) {
2584        put_byte(s, (Byte)s->bi_buf);
2585    }
2586    s->bi_buf = 0;
2587    s->bi_valid = 0;
2588#ifdef DEBUG_ZLIB
2589    s->bits_sent = (s->bits_sent+7) & ~7;
2590#endif
2591}
2592
2593/* ===========================================================================
2594 * Copy a stored block, storing first the length and its
2595 * one's complement if requested.
2596 */
2597local void copy_block(s, buf, len, header)
2598    deflate_state *s;
2599    charf    *buf;    /* the input data */
2600    unsigned len;     /* its length */
2601    int      header;  /* true if block header must be written */
2602{
2603    bi_windup(s);        /* align on byte boundary */
2604    s->last_eob_len = 8; /* enough lookahead for inflate */
2605
2606    if (header) {
2607        put_short(s, (ush)len);
2608        put_short(s, (ush)~len);
2609#ifdef DEBUG_ZLIB
2610        s->bits_sent += 2*16;
2611#endif
2612    }
2613#ifdef DEBUG_ZLIB
2614    s->bits_sent += (ulg)len<<3;
2615#endif
2616    while (len--) {
2617        put_byte(s, *buf++);
2618    }
2619}
2620
2621
2622/*+++++*/
2623/* infblock.h -- header to use infblock.c
2624 * Copyright (C) 1995 Mark Adler
2625 * For conditions of distribution and use, see copyright notice in zlib.h
2626 */
2627
2628/* WARNING: this file should *not* be used by applications. It is
2629   part of the implementation of the compression library and is
2630   subject to change. Applications should only use zlib.h.
2631 */
2632
2633struct inflate_blocks_state;
2634typedef struct inflate_blocks_state FAR inflate_blocks_statef;
2635
2636local inflate_blocks_statef * inflate_blocks_new OF((
2637    z_stream *z,
2638    check_func c,               /* check function */
2639    uInt w));                   /* window size */
2640
2641local int inflate_blocks OF((
2642    inflate_blocks_statef *,
2643    z_stream *,
2644    int));                      /* initial return code */
2645
2646local void inflate_blocks_reset OF((
2647    inflate_blocks_statef *,
2648    z_stream *,
2649    uLongf *));                  /* check value on output */
2650
2651local int inflate_blocks_free OF((
2652    inflate_blocks_statef *,
2653    z_stream *,
2654    uLongf *));                  /* check value on output */
2655
2656local int inflate_addhistory OF((
2657    inflate_blocks_statef *,
2658    z_stream *));
2659
2660local int inflate_packet_flush OF((
2661    inflate_blocks_statef *));
2662
2663/*+++++*/
2664/* inftrees.h -- header to use inftrees.c
2665 * Copyright (C) 1995 Mark Adler
2666 * For conditions of distribution and use, see copyright notice in zlib.h
2667 */
2668
2669/* WARNING: this file should *not* be used by applications. It is
2670   part of the implementation of the compression library and is
2671   subject to change. Applications should only use zlib.h.
2672 */
2673
2674/* Huffman code lookup table entry--this entry is four bytes for machines
2675   that have 16-bit pointers (e.g. PC's in the small or medium model). */
2676
2677typedef struct inflate_huft_s FAR inflate_huft;
2678
2679struct inflate_huft_s {
2680  union {
2681    struct {
2682      Byte Exop;        /* number of extra bits or operation */
2683      Byte Bits;        /* number of bits in this code or subcode */
2684    } what;
2685    uInt Nalloc;	/* number of these allocated here */
2686    Bytef *pad;         /* pad structure to a power of 2 (4 bytes for */
2687  } word;               /*  16-bit, 8 bytes for 32-bit machines) */
2688  union {
2689    uInt Base;          /* literal, length base, or distance base */
2690    inflate_huft *Next; /* pointer to next level of table */
2691  } more;
2692};
2693
2694#ifdef DEBUG_ZLIB
2695  local uInt inflate_hufts;
2696#endif
2697
2698local int inflate_trees_bits OF((
2699    uIntf *,                    /* 19 code lengths */
2700    uIntf *,                    /* bits tree desired/actual depth */
2701    inflate_huft * FAR *,       /* bits tree result */
2702    z_stream *));               /* for zalloc, zfree functions */
2703
2704local int inflate_trees_dynamic OF((
2705    uInt,                       /* number of literal/length codes */
2706    uInt,                       /* number of distance codes */
2707    uIntf *,                    /* that many (total) code lengths */
2708    uIntf *,                    /* literal desired/actual bit depth */
2709    uIntf *,                    /* distance desired/actual bit depth */
2710    inflate_huft * FAR *,       /* literal/length tree result */
2711    inflate_huft * FAR *,       /* distance tree result */
2712    z_stream *));               /* for zalloc, zfree functions */
2713
2714local int inflate_trees_fixed OF((
2715    uIntf *,                    /* literal desired/actual bit depth */
2716    uIntf *,                    /* distance desired/actual bit depth */
2717    inflate_huft * FAR *,       /* literal/length tree result */
2718    inflate_huft * FAR *));     /* distance tree result */
2719
2720local int inflate_trees_free OF((
2721    inflate_huft *,             /* tables to free */
2722    z_stream *));               /* for zfree function */
2723
2724
2725/*+++++*/
2726/* infcodes.h -- header to use infcodes.c
2727 * Copyright (C) 1995 Mark Adler
2728 * For conditions of distribution and use, see copyright notice in zlib.h
2729 */
2730
2731/* WARNING: this file should *not* be used by applications. It is
2732   part of the implementation of the compression library and is
2733   subject to change. Applications should only use zlib.h.
2734 */
2735
2736struct inflate_codes_state;
2737typedef struct inflate_codes_state FAR inflate_codes_statef;
2738
2739local inflate_codes_statef *inflate_codes_new OF((
2740    uInt, uInt,
2741    inflate_huft *, inflate_huft *,
2742    z_stream *));
2743
2744local int inflate_codes OF((
2745    inflate_blocks_statef *,
2746    z_stream *,
2747    int));
2748
2749local void inflate_codes_free OF((
2750    inflate_codes_statef *,
2751    z_stream *));
2752
2753
2754/*+++++*/
2755/* inflate.c -- zlib interface to inflate modules
2756 * Copyright (C) 1995 Mark Adler
2757 * For conditions of distribution and use, see copyright notice in zlib.h
2758 */
2759
2760/* inflate private state */
2761struct internal_state {
2762
2763  /* mode */
2764  enum {
2765      METHOD,   /* waiting for method byte */
2766      FLAG,     /* waiting for flag byte */
2767      BLOCKS,   /* decompressing blocks */
2768      CHECK4,   /* four check bytes to go */
2769      CHECK3,   /* three check bytes to go */
2770      CHECK2,   /* two check bytes to go */
2771      CHECK1,   /* one check byte to go */
2772      DONE,     /* finished check, done */
2773      BAD}      /* got an error--stay here */
2774    mode;               /* current inflate mode */
2775
2776  /* mode dependent information */
2777  union {
2778    uInt method;        /* if FLAGS, method byte */
2779    struct {
2780      uLong was;                /* computed check value */
2781      uLong need;               /* stream check value */
2782    } check;            /* if CHECK, check values to compare */
2783    uInt marker;        /* if BAD, inflateSync's marker bytes count */
2784  } sub;        /* submode */
2785
2786  /* mode independent information */
2787  int  nowrap;          /* flag for no wrapper */
2788  uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
2789  inflate_blocks_statef
2790    *blocks;            /* current inflate_blocks state */
2791
2792};
2793
2794
2795int inflateReset(z)
2796z_stream *z;
2797{
2798  uLong c;
2799
2800  if (z == Z_NULL || z->state == Z_NULL)
2801    return Z_STREAM_ERROR;
2802  z->total_in = z->total_out = 0;
2803  z->msg = Z_NULL;
2804  z->state->mode = z->state->nowrap ? BLOCKS : METHOD;
2805  inflate_blocks_reset(z->state->blocks, z, &c);
2806  Trace((stderr, "inflate: reset\n"));
2807  return Z_OK;
2808}
2809
2810
2811int inflateEnd(z)
2812z_stream *z;
2813{
2814  uLong c;
2815
2816  if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
2817    return Z_STREAM_ERROR;
2818  if (z->state->blocks != Z_NULL)
2819    inflate_blocks_free(z->state->blocks, z, &c);
2820  ZFREE(z, z->state, sizeof(struct internal_state));
2821  z->state = Z_NULL;
2822  Trace((stderr, "inflate: end\n"));
2823  return Z_OK;
2824}
2825
2826
2827int inflateInit2(z, w)
2828z_stream *z;
2829int w;
2830{
2831  /* initialize state */
2832  if (z == Z_NULL)
2833    return Z_STREAM_ERROR;
2834/*  if (z->zalloc == Z_NULL) z->zalloc = zcalloc; */
2835/*  if (z->zfree == Z_NULL) z->zfree = zcfree; */
2836  if ((z->state = (struct internal_state FAR *)
2837       ZALLOC_INIT(z,1,sizeof(struct internal_state))) == Z_NULL)
2838    return Z_MEM_ERROR;
2839  z->state->blocks = Z_NULL;
2840
2841  /* handle undocumented nowrap option (no zlib header or check) */
2842  z->state->nowrap = 0;
2843  if (w < 0)
2844  {
2845    w = - w;
2846    z->state->nowrap = 1;
2847  }
2848
2849  /* set window size */
2850  if (w < 8 || w > 15)
2851  {
2852    inflateEnd(z);
2853    return Z_STREAM_ERROR;
2854  }
2855  z->state->wbits = (uInt)w;
2856
2857  /* create inflate_blocks state */
2858  if ((z->state->blocks =
2859       inflate_blocks_new(z, z->state->nowrap ? Z_NULL : adler32, 1 << w))
2860      == Z_NULL)
2861  {
2862    inflateEnd(z);
2863    return Z_MEM_ERROR;
2864  }
2865  Trace((stderr, "inflate: allocated\n"));
2866
2867  /* reset state */
2868  inflateReset(z);
2869  return Z_OK;
2870}
2871
2872
2873int inflateInit(z)
2874z_stream *z;
2875{
2876  return inflateInit2(z, DEF_WBITS);
2877}
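
/* Illustrative sketch only (not compiled, not part of the library): one
 * possible way a caller might drive the inflate side defined above when the
 * whole compressed stream is in memory.  Note that this build does not fill
 * in default zalloc/zfree (see inflateInit2 above), so the caller must
 * supply allocation callbacks matching the alloc_func/free_func types in
 * zlib.h; my_alloc, my_free and the buffer size below are made-up names
 * used only for this example, and malloc/free assume <stdlib.h>.
 */
#if 0
local voidpf my_alloc(opaque, items, size)
    voidpf opaque;
    uInt items, size;
{
    return (voidpf)malloc(items * size);
}

local void my_free(opaque, address)
    voidpf opaque;
    voidpf address;
{
    free(address);
}

local int inflate_example(in, in_len)
    Bytef *in;      /* complete compressed stream */
    uInt  in_len;
{
    z_stream z;
    Byte out[4096];
    int r;

    z.zalloc = my_alloc;
    z.zfree  = my_free;
    z.opaque = Z_NULL;
    z.next_in  = in;
    z.avail_in = in_len;
    r = inflateInit(&z);
    if (r != Z_OK) return r;

    do {
        z.next_out  = out;
        z.avail_out = sizeof(out);
        r = inflate(&z, Z_NO_FLUSH);
        /* consume sizeof(out) - z.avail_out decompressed bytes here */
    } while (r == Z_OK);

    inflateEnd(&z);
    return r == Z_STREAM_END ? Z_OK : r;
}
#endif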
2878
2879
2880#define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}
2881#define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
2882
2883int inflate(z, f)
2884z_stream *z;
2885int f;
2886{
2887  int r;
2888  uInt b;
2889
2890  if (z == Z_NULL || z->next_in == Z_NULL)
2891    return Z_STREAM_ERROR;
2892  r = Z_BUF_ERROR;
2893  while (1) switch (z->state->mode)
2894  {
2895    case METHOD:
2896      NEEDBYTE
2897      if (((z->state->sub.method = NEXTBYTE) & 0xf) != DEFLATED)
2898      {
2899        z->state->mode = BAD;
2900        z->msg = "unknown compression method";
2901        z->state->sub.marker = 5;       /* can't try inflateSync */
2902        break;
2903      }
2904      if ((z->state->sub.method >> 4) + 8 > z->state->wbits)
2905      {
2906        z->state->mode = BAD;
2907        z->msg = "invalid window size";
2908        z->state->sub.marker = 5;       /* can't try inflateSync */
2909        break;
2910      }
2911      z->state->mode = FLAG;
2912    case FLAG:
2913      NEEDBYTE
2914      if ((b = NEXTBYTE) & 0x20)
2915      {
2916        z->state->mode = BAD;
2917        z->msg = "invalid reserved bit";
2918        z->state->sub.marker = 5;       /* can't try inflateSync */
2919        break;
2920      }
2921      if (((z->state->sub.method << 8) + b) % 31)
2922      {
2923        z->state->mode = BAD;
2924        z->msg = "incorrect header check";
2925        z->state->sub.marker = 5;       /* can't try inflateSync */
2926        break;
2927      }
2928      Trace((stderr, "inflate: zlib header ok\n"));
2929      z->state->mode = BLOCKS;
2930    case BLOCKS:
2931      r = inflate_blocks(z->state->blocks, z, r);
2932      if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)
2933	  r = inflate_packet_flush(z->state->blocks);
2934      if (r == Z_DATA_ERROR)
2935      {
2936        z->state->mode = BAD;
2937        z->state->sub.marker = 0;       /* can try inflateSync */
2938        break;
2939      }
2940      if (r != Z_STREAM_END)
2941        return r;
2942      r = Z_OK;
2943      inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);
2944      if (z->state->nowrap)
2945      {
2946        z->state->mode = DONE;
2947        break;
2948      }
2949      z->state->mode = CHECK4;
2950    case CHECK4:
2951      NEEDBYTE
2952      z->state->sub.check.need = (uLong)NEXTBYTE << 24;
2953      z->state->mode = CHECK3;
2954    case CHECK3:
2955      NEEDBYTE
2956      z->state->sub.check.need += (uLong)NEXTBYTE << 16;
2957      z->state->mode = CHECK2;
2958    case CHECK2:
2959      NEEDBYTE
2960      z->state->sub.check.need += (uLong)NEXTBYTE << 8;
2961      z->state->mode = CHECK1;
2962    case CHECK1:
2963      NEEDBYTE
2964      z->state->sub.check.need += (uLong)NEXTBYTE;
2965
2966      if (z->state->sub.check.was != z->state->sub.check.need)
2967      {
2968        z->state->mode = BAD;
2969        z->msg = "incorrect data check";
2970        z->state->sub.marker = 5;       /* can't try inflateSync */
2971        break;
2972      }
2973      Trace((stderr, "inflate: zlib check ok\n"));
2974      z->state->mode = DONE;
2975    case DONE:
2976      return Z_STREAM_END;
2977    case BAD:
2978      return Z_DATA_ERROR;
2979    default:
2980      return Z_STREAM_ERROR;
2981  }
2982
2983 empty:
2984  if (f != Z_PACKET_FLUSH)
2985    return r;
2986  z->state->mode = BAD;
2987  z->state->sub.marker = 0;       /* can try inflateSync */
2988  return Z_DATA_ERROR;
2989}
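
/*
 * Sketch of how a PPP receiver might drive the state machine above:
 * each compressed packet is handed to inflate() with Z_PACKET_FLUSH,
 * which should let the call return Z_OK once the packet's data has been
 * produced even though no final block is ever sent.  The buffer names
 * below are hypothetical; z is a stream set up as in the inflateInit2()
 * note above.
 *
 *      z.next_in = ibuf;    z.avail_in = ilen;
 *      z.next_out = obuf;   z.avail_out = OBUFSIZE;
 *      r = inflate(&z, Z_PACKET_FLUSH);
 *      if (r != Z_OK)
 *          ...bad packet: drop it, try inflateSync() or a reset...
 *      olen = OBUFSIZE - z.avail_out;
 */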
2990
2991/*
2992 * This subroutine adds the data at next_in/avail_in to the output history
2993 * without performing any output.  The output buffer must be "caught up";
2994 * i.e. no pending output (hence s->read equals s->write), and the state must
2995 * be BLOCKS (i.e. we should be willing to see the start of a series of
2996 * BLOCKS).  On exit, the output will also be caught up, and the checksum
2997 * will have been updated if need be.
2998 */
2999
3000int inflateIncomp(z)
3001z_stream *z;
3002{
3003    if (z->state->mode != BLOCKS)
3004	return Z_DATA_ERROR;
3005    return inflate_addhistory(z->state->blocks, z);
3006}
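
/*
 * Sketch of the intended use: when a packet arrives uncompressed (say
 * the compressor found it would not shrink), its bytes must still be
 * pushed into the history window so that back-references in later
 * packets resolve correctly, and all previously inflated output must
 * already have been consumed ("caught up") before the call.  The buffer
 * names are hypothetical:
 *
 *      z.next_in = pkt;  z.avail_in = pktlen;
 *      if (inflateIncomp(&z) != Z_OK)
 *          ...decompressor was not at a block boundary; resynchronize...
 */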
3007
3008
3009int inflateSync(z)
3010z_stream *z;
3011{
3012  uInt n;       /* number of bytes to look at */
3013  Bytef *p;     /* pointer to bytes */
3014  uInt m;       /* number of marker bytes found in a row */
3015  uLong r, w;   /* temporaries to save total_in and total_out */
3016
3017  /* set up */
3018  if (z == Z_NULL || z->state == Z_NULL)
3019    return Z_STREAM_ERROR;
3020  if (z->state->mode != BAD)
3021  {
3022    z->state->mode = BAD;
3023    z->state->sub.marker = 0;
3024  }
3025  if ((n = z->avail_in) == 0)
3026    return Z_BUF_ERROR;
3027  p = z->next_in;
3028  m = z->state->sub.marker;
3029
3030  /* search */
3031  while (n && m < 4)
3032  {
3033    if (*p == (Byte)(m < 2 ? 0 : 0xff))
3034      m++;
3035    else if (*p)
3036      m = 0;
3037    else
3038      m = 4 - m;
3039    p++, n--;
3040  }
3041
3042  /* restore */
3043  z->total_in += p - z->next_in;
3044  z->next_in = p;
3045  z->avail_in = n;
3046  z->state->sub.marker = m;
3047
3048  /* return no joy or set up to restart on a new block */
3049  if (m != 4)
3050    return Z_DATA_ERROR;
3051  r = z->total_in;  w = z->total_out;
3052  inflateReset(z);
3053  z->total_in = r;  z->total_out = w;
3054  z->state->mode = BLOCKS;
3055  return Z_OK;
3056}
3057
3058#undef NEEDBYTE
3059#undef NEXTBYTE
3060
3061/*+++++*/
3062/* infutil.h -- types and macros common to blocks and codes
3063 * Copyright (C) 1995 Mark Adler
3064 * For conditions of distribution and use, see copyright notice in zlib.h
3065 */
3066
3067/* WARNING: this file should *not* be used by applications. It is
3068   part of the implementation of the compression library and is
3069   subject to change. Applications should only use zlib.h.
3070 */
3071
3072/* inflate blocks semi-private state */
3073struct inflate_blocks_state {
3074
3075  /* mode */
3076  enum {
3077      TYPE,     /* get type bits (3, including end bit) */
3078      LENS,     /* get lengths for stored */
3079      STORED,   /* processing stored block */
3080      TABLE,    /* get table lengths */
3081      BTREE,    /* get bit lengths tree for a dynamic block */
3082      DTREE,    /* get length, distance trees for a dynamic block */
3083      CODES,    /* processing fixed or dynamic block */
3084      DRY,      /* output remaining window bytes */
3085      DONEB,     /* finished last block, done */
3086      BADB}      /* got a data error--stuck here */
3087    mode;               /* current inflate_block mode */
3088
3089  /* mode dependent information */
3090  union {
3091    uInt left;          /* if STORED, bytes left to copy */
3092    struct {
3093      uInt table;               /* table lengths (14 bits) */
3094      uInt index;               /* index into blens (or border) */
3095      uIntf *blens;             /* bit lengths of codes */
3096      uInt bb;                  /* bit length tree depth */
3097      inflate_huft *tb;         /* bit length decoding tree */
3098      int nblens;		/* # elements allocated at blens */
3099    } trees;            /* if DTREE, decoding info for trees */
3100    struct {
3101      inflate_huft *tl, *td;    /* trees to free */
3102      inflate_codes_statef
3103         *codes;
3104    } decode;           /* if CODES, current state */
3105  } sub;                /* submode */
3106  uInt last;            /* true if this block is the last block */
3107
3108  /* mode independent information */
3109  uInt bitk;            /* bits in bit buffer */
3110  uLong bitb;           /* bit buffer */
3111  Bytef *window;        /* sliding window */
3112  Bytef *end;           /* one byte after sliding window */
3113  Bytef *read;          /* window read pointer */
3114  Bytef *write;         /* window write pointer */
3115  check_func checkfn;   /* check function */
3116  uLong check;          /* check on output */
3117
3118};
3119
3120
3121/* defines for inflate input/output */
3122/*   update pointers and return */
3123#define UPDBITS {s->bitb=b;s->bitk=k;}
3124#define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
3125#define UPDOUT {s->write=q;}
3126#define UPDATE {UPDBITS UPDIN UPDOUT}
3127#define LEAVE {UPDATE return inflate_flush(s,z,r);}
3128/*   get bytes and bits */
3129#define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
3130#define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
3131#define NEXTBYTE (n--,*p++)
3132#define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
3133#define DUMPBITS(j) {b>>=(j);k-=(j);}
3134/*   output bytes */
3135#define WAVAIL (q<s->read?s->read-q-1:s->end-q)
3136#define LOADOUT {q=s->write;m=WAVAIL;}
3137#define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=WAVAIL;}}
3138#define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
3139#define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
3140#define OUTBYTE(a) {*q++=(Byte)(a);m--;}
3141/*   load local pointers */
3142#define LOAD {LOADIN LOADOUT}
3143
3144/* And'ing with mask[n] masks the lower n bits */
3145local uInt inflate_mask[] = {
3146    0x0000,
3147    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
3148    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
3149};
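
/*
 * These macros make each decode step resumable: LOAD copies the stream
 * and bit-buffer state into locals, NEEDBITS and NEEDOUT escape through
 * LEAVE (flushing the window and saving state) whenever input or output
 * runs dry, and UPDATE writes the locals back.  The typical step, as
 * used by inflate_blocks() and inflate_codes() below, looks like this
 * sketch (x stands for whatever byte is being produced):
 *
 *      LOAD                             -- p,n,b,k,q,m from z and s
 *      NEEDBITS(3)                      -- may LEAVE if input runs out
 *      t = (uInt)b & inflate_mask[3];   -- peek at 3 bits (same as b & 7)
 *      DUMPBITS(3)                      -- consume them
 *      NEEDOUT                          -- may LEAVE if the window fills
 *      OUTBYTE(x)                       -- one byte into the window
 *      UPDATE                           -- store locals back on return
 */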
3150
3151/* copy as much as possible from the sliding window to the output area */
3152local int inflate_flush OF((
3153    inflate_blocks_statef *,
3154    z_stream *,
3155    int));
3156
3157/*+++++*/
3158/* inffast.h -- header to use inffast.c
3159 * Copyright (C) 1995 Mark Adler
3160 * For conditions of distribution and use, see copyright notice in zlib.h
3161 */
3162
3163/* WARNING: this file should *not* be used by applications. It is
3164   part of the implementation of the compression library and is
3165   subject to change. Applications should only use zlib.h.
3166 */
3167
3168local int inflate_fast OF((
3169    uInt,
3170    uInt,
3171    inflate_huft *,
3172    inflate_huft *,
3173    inflate_blocks_statef *,
3174    z_stream *));
3175
3176
3177/*+++++*/
3178/* infblock.c -- interpret and process block types to last block
3179 * Copyright (C) 1995 Mark Adler
3180 * For conditions of distribution and use, see copyright notice in zlib.h
3181 */
3182
3183/* Table for deflate from PKZIP's appnote.txt. */
3184local uInt border[] = { /* Order of the bit length code lengths */
3185        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
3186
3187/*
3188   Notes beyond the 1.93a appnote.txt:
3189
3190   1. Distance pointers never point before the beginning of the output
3191      stream.
3192   2. Distance pointers can point back across blocks, up to 32k away.
3193   3. There is an implied maximum of 7 bits for the bit length table and
3194      15 bits for the actual data.
3195   4. If only one code exists, then it is encoded using one bit.  (Zero
3196      would be more efficient, but perhaps a little confusing.)  If two
3197      codes exist, they are coded using one bit each (0 and 1).
3198   5. There is no way of sending zero distance codes--a dummy must be
3199      sent if there are none.  (History: a pre 2.0 version of PKZIP would
3200      store blocks with no distance codes, but this was discovered to be
3201      too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
3202      zero distance codes, which is sent as one code of zero bits in
3203      length.
3204   6. There are up to 286 literal/length codes.  Code 256 represents the
3205      end-of-block.  Note however that the static length tree defines
3206      288 codes just to fill out the Huffman codes.  Codes 286 and 287
3207      cannot be used though, since there is no length base or extra bits
3208      defined for them.  Similarly, there are up to 30 distance codes.
3209      However, static trees define 32 codes (all 5 bits) to fill out the
3210      Huffman codes, but the last two had better not show up in the data.
3211   7. Unzip can check dynamic Huffman blocks for complete code sets.
3212      The exception is that a single code would not be complete (see #4).
3213   8. The five bits following the block type is really the number of
3214      literal codes sent minus 257.
3215   9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
3216      (1+6+6).  Therefore, to output three times the length, you output
3217      three codes (1+1+1), whereas to output four times the same length,
3218      you only need two codes (1+3).  Hmm.  (Worked example below.)
3219  10. In the tree reconstruction algorithm, Code = Code + Increment
3220      only if BitLength(i) is not zero.  (Pretty obvious.)
3221  11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
3222  12. Note: length code 284 can represent 227-258, but length code 285
3223      really is 258.  The last length deserves its own, short code
3224      since it gets used a lot in very redundant files.  The length
3225      258 is special since 258 - 3 (the min match length) is 255.
3226  13. The literal/length and distance code bit lengths are read as a
3227      single stream of lengths.  It is possible (and advantageous) for
3228      a repeat code (16, 17, or 18) to go across the boundary between
3229      the two sets of lengths.
3230 */
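
/*
 * Worked example for note #9 above: in the code-length alphabet decoded
 * in the BTREE and DTREE states below, symbols 0..15 stand for
 * themselves, 16 repeats the previous length 3-6 times (2 extra bits),
 * 17 repeats a zero length 3-10 times (3 extra bits), and 18 repeats a
 * zero length 11-138 times (7 extra bits).  So "8,16,16" with both 16s
 * carrying the maximum repeat of six expands to 1 + 6 + 6 = 13 lengths
 * of 8.  A length used exactly three times must be sent as three
 * literal codes, since 16 repeats at least three times, but four uses
 * fit in two codes: the length itself plus one 16 repeating it three
 * more times (1 + 3).
 */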
3231
3232
3233local void inflate_blocks_reset(s, z, c)
3234inflate_blocks_statef *s;
3235z_stream *z;
3236uLongf *c;
3237{
3238  if (s->checkfn != Z_NULL)
3239    *c = s->check;
3240  if (s->mode == BTREE || s->mode == DTREE)
3241    ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
3242  if (s->mode == CODES)
3243  {
3244    inflate_codes_free(s->sub.decode.codes, z);
3245    inflate_trees_free(s->sub.decode.td, z);
3246    inflate_trees_free(s->sub.decode.tl, z);
3247  }
3248  s->mode = TYPE;
3249  s->bitk = 0;
3250  s->bitb = 0;
3251  s->read = s->write = s->window;
3252  if (s->checkfn != Z_NULL)
3253    s->check = (*s->checkfn)(0L, Z_NULL, 0);
3254  Trace((stderr, "inflate:   blocks reset\n"));
3255}
3256
3257
3258local inflate_blocks_statef *inflate_blocks_new(z, c, w)
3259z_stream *z;
3260check_func c;
3261uInt w;
3262{
3263  inflate_blocks_statef *s;
3264
3265  if ((s = (inflate_blocks_statef *)ZALLOC_INIT
3266       (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
3267    return s;
3268  if ((s->window = (Bytef *)ZALLOC_INIT(z, 1, w)) == Z_NULL)
3269  {
3270    ZFREE(z, s, sizeof(struct inflate_blocks_state));
3271    return Z_NULL;
3272  }
3273  s->end = s->window + w;
3274  s->checkfn = c;
3275  s->mode = TYPE;
3276  Trace((stderr, "inflate:   blocks allocated\n"));
3277  inflate_blocks_reset(s, z, &s->check);
3278  return s;
3279}
3280
3281
3282local int inflate_blocks(s, z, r)
3283inflate_blocks_statef *s;
3284z_stream *z;
3285int r;
3286{
3287  uInt t;               /* temporary storage */
3288  uLong b;              /* bit buffer */
3289  uInt k;               /* bits in bit buffer */
3290  Bytef *p;             /* input data pointer */
3291  uInt n;               /* bytes available there */
3292  Bytef *q;             /* output window write pointer */
3293  uInt m;               /* bytes to end of window or read pointer */
3294
3295  /* copy input/output information to locals (UPDATE macro restores) */
3296  LOAD
3297
3298  /* process input based on current state */
3299  while (1) switch (s->mode)
3300  {
3301    case TYPE:
3302      NEEDBITS(3)
3303      t = (uInt)b & 7;
3304      s->last = t & 1;
3305      switch (t >> 1)
3306      {
3307        case 0:                         /* stored */
3308          Trace((stderr, "inflate:     stored block%s\n",
3309                 s->last ? " (last)" : ""));
3310          DUMPBITS(3)
3311          t = k & 7;                    /* go to byte boundary */
3312          DUMPBITS(t)
3313          s->mode = LENS;               /* get length of stored block */
3314          break;
3315        case 1:                         /* fixed */
3316          Trace((stderr, "inflate:     fixed codes block%s\n",
3317                 s->last ? " (last)" : ""));
3318          {
3319            uInt bl, bd;
3320            inflate_huft *tl, *td;
3321
3322            inflate_trees_fixed(&bl, &bd, &tl, &td);
3323            s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
3324            if (s->sub.decode.codes == Z_NULL)
3325            {
3326              r = Z_MEM_ERROR;
3327              LEAVE
3328            }
3329            s->sub.decode.tl = Z_NULL;  /* don't try to free these */
3330            s->sub.decode.td = Z_NULL;
3331          }
3332          DUMPBITS(3)
3333          s->mode = CODES;
3334          break;
3335        case 2:                         /* dynamic */
3336          Trace((stderr, "inflate:     dynamic codes block%s\n",
3337                 s->last ? " (last)" : ""));
3338          DUMPBITS(3)
3339          s->mode = TABLE;
3340          break;
3341        case 3:                         /* illegal */
3342          DUMPBITS(3)
3343          s->mode = BADB;
3344          z->msg = "invalid block type";
3345          r = Z_DATA_ERROR;
3346          LEAVE
3347      }
3348      break;
3349    case LENS:
3350      NEEDBITS(32)
3351      if (((~b) >> 16) != (b & 0xffff))
3352      {
3353        s->mode = BADB;
3354        z->msg = "invalid stored block lengths";
3355        r = Z_DATA_ERROR;
3356        LEAVE
3357      }
3358      s->sub.left = (uInt)b & 0xffff;
3359      b = k = 0;                      /* dump bits */
3360      Tracev((stderr, "inflate:       stored length %u\n", s->sub.left));
3361      s->mode = s->sub.left ? STORED : TYPE;
3362      break;
3363    case STORED:
3364      if (n == 0)
3365        LEAVE
3366      NEEDOUT
3367      t = s->sub.left;
3368      if (t > n) t = n;
3369      if (t > m) t = m;
3370      zmemcpy(q, p, t);
3371      p += t;  n -= t;
3372      q += t;  m -= t;
3373      if ((s->sub.left -= t) != 0)
3374        break;
3375      Tracev((stderr, "inflate:       stored end, %lu total out\n",
3376              z->total_out + (q >= s->read ? q - s->read :
3377              (s->end - s->read) + (q - s->window))));
3378      s->mode = s->last ? DRY : TYPE;
3379      break;
3380    case TABLE:
3381      NEEDBITS(14)
3382      s->sub.trees.table = t = (uInt)b & 0x3fff;
3383#ifndef PKZIP_BUG_WORKAROUND
3384      if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
3385      {
3386        s->mode = BADB;
3387        z->msg = "too many length or distance symbols";
3388        r = Z_DATA_ERROR;
3389        LEAVE
3390      }
3391#endif
3392      t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
3393      if (t < 19)
3394        t = 19;
3395      if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
3396      {
3397        r = Z_MEM_ERROR;
3398        LEAVE
3399      }
3400      s->sub.trees.nblens = t;
3401      DUMPBITS(14)
3402      s->sub.trees.index = 0;
3403      Tracev((stderr, "inflate:       table sizes ok\n"));
3404      s->mode = BTREE;
3405    case BTREE:
3406      while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
3407      {
3408        NEEDBITS(3)
3409        s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
3410        DUMPBITS(3)
3411      }
3412      while (s->sub.trees.index < 19)
3413        s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
3414      s->sub.trees.bb = 7;
3415      t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
3416                             &s->sub.trees.tb, z);
3417      if (t != Z_OK)
3418      {
3419        r = t;
3420        if (r == Z_DATA_ERROR)
3421          s->mode = BADB;
3422        LEAVE
3423      }
3424      s->sub.trees.index = 0;
3425      Tracev((stderr, "inflate:       bits tree ok\n"));
3426      s->mode = DTREE;
3427    case DTREE:
3428      while (t = s->sub.trees.table,
3429             s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
3430      {
3431        inflate_huft *h;
3432        uInt i, j, c;
3433
3434        t = s->sub.trees.bb;
3435        NEEDBITS(t)
3436        h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
3437        t = h->word.what.Bits;
3438        c = h->more.Base;
3439        if (c < 16)
3440        {
3441          DUMPBITS(t)
3442          s->sub.trees.blens[s->sub.trees.index++] = c;
3443        }
3444        else /* c == 16..18 */
3445        {
3446          i = c == 18 ? 7 : c - 14;
3447          j = c == 18 ? 11 : 3;
3448          NEEDBITS(t + i)
3449          DUMPBITS(t)
3450          j += (uInt)b & inflate_mask[i];
3451          DUMPBITS(i)
3452          i = s->sub.trees.index;
3453          t = s->sub.trees.table;
3454          if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
3455              (c == 16 && i < 1))
3456          {
3457            s->mode = BADB;
3458            z->msg = "invalid bit length repeat";
3459            r = Z_DATA_ERROR;
3460            LEAVE
3461          }
3462          c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
3463          do {
3464            s->sub.trees.blens[i++] = c;
3465          } while (--j);
3466          s->sub.trees.index = i;
3467        }
3468      }
3469      inflate_trees_free(s->sub.trees.tb, z);
3470      s->sub.trees.tb = Z_NULL;
3471      {
3472        uInt bl, bd;
3473        inflate_huft *tl, *td;
3474        inflate_codes_statef *c;
3475
3476        bl = 9;         /* must be <= 9 for lookahead assumptions */
3477        bd = 6;         /* must be <= 9 for lookahead assumptions */
3478        t = s->sub.trees.table;
3479        t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
3480                                  s->sub.trees.blens, &bl, &bd, &tl, &td, z);
3481        if (t != Z_OK)
3482        {
3483          if (t == (uInt)Z_DATA_ERROR)
3484            s->mode = BADB;
3485          r = t;
3486          LEAVE
3487        }
3488        Tracev((stderr, "inflate:       trees ok\n"));
3489        if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
3490        {
3491          inflate_trees_free(td, z);
3492          inflate_trees_free(tl, z);
3493          r = Z_MEM_ERROR;
3494          LEAVE
3495        }
3496        ZFREE(z, s->sub.trees.blens, s->sub.trees.nblens * sizeof(uInt));
3497        s->sub.decode.codes = c;
3498        s->sub.decode.tl = tl;
3499        s->sub.decode.td = td;
3500      }
3501      s->mode = CODES;
3502    case CODES:
3503      UPDATE
3504      if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
3505        return inflate_flush(s, z, r);
3506      r = Z_OK;
3507      inflate_codes_free(s->sub.decode.codes, z);
3508      inflate_trees_free(s->sub.decode.td, z);
3509      inflate_trees_free(s->sub.decode.tl, z);
3510      LOAD
3511      Tracev((stderr, "inflate:       codes end, %lu total out\n",
3512              z->total_out + (q >= s->read ? q - s->read :
3513              (s->end - s->read) + (q - s->window))));
3514      if (!s->last)
3515      {
3516        s->mode = TYPE;
3517        break;
3518      }
3519      if (k > 7)              /* return unused byte, if any */
3520      {
3521        Assert(k < 16, "inflate_codes grabbed too many bytes")
3522        k -= 8;
3523        n++;
3524        p--;                    /* can always return one */
3525      }
3526      s->mode = DRY;
3527    case DRY:
3528      FLUSH
3529      if (s->read != s->write)
3530        LEAVE
3531      s->mode = DONEB;
3532    case DONEB:
3533      r = Z_STREAM_END;
3534      LEAVE
3535    case BADB:
3536      r = Z_DATA_ERROR;
3537      LEAVE
3538    default:
3539      r = Z_STREAM_ERROR;
3540      LEAVE
3541  }
3542}
3543
3544
3545local int inflate_blocks_free(s, z, c)
3546inflate_blocks_statef *s;
3547z_stream *z;
3548uLongf *c;
3549{
3550  inflate_blocks_reset(s, z, c);
3551  ZFREE(z, s->window, s->end - s->window);
3552  ZFREE(z, s, sizeof(struct inflate_blocks_state));
3553  Trace((stderr, "inflate:   blocks freed\n"));
3554  return Z_OK;
3555}
3556
3557/*
3558 * This subroutine adds the data at next_in/avail_in to the output history
3559 * without performing any output.  The output buffer must be "caught up";
3560 * i.e. no pending output (hence s->read equals s->write), and the state must
3561 * be BLOCKS (i.e. we should be willing to see the start of a series of
3562 * BLOCKS).  On exit, the output will also be caught up, and the checksum
3563 * will have been updated if need be.
3564 */
3565local int inflate_addhistory(s, z)
3566inflate_blocks_statef *s;
3567z_stream *z;
3568{
3569    uLong b;              /* bit buffer */  /* NOT USED HERE */
3570    uInt k;               /* bits in bit buffer */ /* NOT USED HERE */
3571    uInt t;               /* temporary storage */
3572    Bytef *p;             /* input data pointer */
3573    uInt n;               /* bytes available there */
3574    Bytef *q;             /* output window write pointer */
3575    uInt m;               /* bytes to end of window or read pointer */
3576
3577    if (s->read != s->write)
3578	return Z_STREAM_ERROR;
3579    if (s->mode != TYPE)
3580	return Z_DATA_ERROR;
3581
3582    /* we're ready to rock */
3583    LOAD
3584    /* while there is input ready, copy to output buffer, moving
3585     * pointers as needed.
3586     */
3587    while (n) {
3588	t = n;  /* how many to do */
3589	/* is there room until end of buffer? */
3590	if (t > m) t = m;
3591	/* update check information */
3592	if (s->checkfn != Z_NULL)
3593	    s->check = (*s->checkfn)(s->check, q, t);
3594	zmemcpy(q, p, t);
3595	q += t;
3596	p += t;
3597	n -= t;
3598	z->total_out += t;
3599	s->read = q;    /* drag read pointer forward */
3600/*      WRAP  */ 	/* expand WRAP macro by hand to handle s->read */
3601	if (q == s->end) {
3602	    s->read = q = s->window;
3603	    m = WAVAIL;
3604	}
3605    }
3606    UPDATE
3607    return Z_OK;
3608}
3609
3610
3611/*
3612 * At the end of a Deflate-compressed PPP packet, we expect to have seen
3613 * a `stored' block type value but not the (zero) length bytes.
3614 */
3615local int inflate_packet_flush(s)
3616    inflate_blocks_statef *s;
3617{
3618    if (s->mode != LENS)
3619	return Z_DATA_ERROR;
3620    s->mode = TYPE;
3621    return Z_OK;
3622}
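
/*
 * Put differently: the compressor ends every packet by emitting the
 * three type bits of an empty stored block plus padding to a byte
 * boundary, and then withholds the four LEN/NLEN bytes (00 00 ff ff for
 * a zero-length block) that would normally follow.  Arriving here in
 * LENS with the input exhausted is therefore the expected end-of-packet
 * state, and stepping back to TYPE lets the next packet extend the same
 * history; this is what the Z_PACKET_FLUSH path in inflate() relies on.
 */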
3623
3624
3625/*+++++*/
3626/* inftrees.c -- generate Huffman trees for efficient decoding
3627 * Copyright (C) 1995 Mark Adler
3628 * For conditions of distribution and use, see copyright notice in zlib.h
3629 */
3630
3631/* simplify the use of the inflate_huft type with some defines */
3632#define base more.Base
3633#define next more.Next
3634#define exop word.what.Exop
3635#define bits word.what.Bits
3636
3637
3638local int huft_build OF((
3639    uIntf *,            /* code lengths in bits */
3640    uInt,               /* number of codes */
3641    uInt,               /* number of "simple" codes */
3642    uIntf *,            /* list of base values for non-simple codes */
3643    uIntf *,            /* list of extra bits for non-simple codes */
3644    inflate_huft * FAR*,/* result: starting table */
3645    uIntf *,            /* maximum lookup bits (returns actual) */
3646    z_stream *));       /* for zalloc function */
3647
3648local voidpf falloc OF((
3649    voidpf,             /* opaque pointer (not used) */
3650    uInt,               /* number of items */
3651    uInt));             /* size of item */
3652
3653local void ffree OF((
3654    voidpf q,           /* opaque pointer (not used) */
3655    voidpf p,           /* what to free (not used) */
3656    uInt n));		/* number of bytes (not used) */
3657
3658/* Tables for deflate from PKZIP's appnote.txt. */
3659local uInt cplens[] = { /* Copy lengths for literal codes 257..285 */
3660        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
3661        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
3662        /* actually lengths - 2; also see note #13 above about 258 */
3663local uInt cplext[] = { /* Extra bits for literal codes 257..285 */
3664        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
3665        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 192, 192}; /* 192==invalid */
3666local uInt cpdist[] = { /* Copy offsets for distance codes 0..29 */
3667        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
3668        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
3669        8193, 12289, 16385, 24577};
3670local uInt cpdext[] = { /* Extra bits for distance codes */
3671        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
3672        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
3673        12, 12, 13, 13};
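
/*
 * Example of how these tables are read: length code 266 has base
 * cplens[266 - 257] = 13 and cplext[9] = 1 extra bit, so it stands for
 * match lengths 13-14; distance code 4 has base cpdist[4] = 5 and
 * cpdext[4] = 1 extra bit, so it stands for distances 5-6.  huft_build()
 * below folds the base into the table entry and the extra-bit count
 * into its exop field.
 */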
3674
3675/*
3676   Huffman code decoding is performed using a multi-level table lookup.
3677   The fastest way to decode is to simply build a lookup table whose
3678   size is determined by the longest code.  However, the time it takes
3679   to build this table can also be a factor if the data being decoded
3680   is not very long.  The most common codes are necessarily the
3681   shortest codes, so those codes dominate the decoding time, and hence
3682   the speed.  The idea is you can have a shorter table that decodes the
3683   shorter, more probable codes, and then point to subsidiary tables for
3684   the longer codes.  The time it costs to decode the longer codes is
3685   then traded against the time it takes to make longer tables.
3686
3687   The results of this trade are in the variables lbits and dbits
3688   below.  lbits is the number of bits the first level table for literal/
3689   length codes can decode in one step, and dbits is the same thing for
3690   the distance codes.  Subsequent tables are also less than or equal to
3691   those sizes.  These values may be adjusted either when all of the
3692   codes are shorter than that, in which case the longest code length in
3693   bits is used, or when the shortest code is *longer* than the requested
3694   table size, in which case the length of the shortest code in bits is
3695   used.
3696
3697   There are two different values for the two tables, since they code a
3698   different number of possibilities each.  The literal/length table
3699   codes 286 possible values, or in a flat code, a little over eight
3700   bits.  The distance table codes 30 possible values, or a little less
3701   than five bits, flat.  The optimum values for speed end up being
3702   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
3703   The optimum values may differ though from machine to machine, and
3704   possibly even between compilers.  Your mileage may vary.
3705 */
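
/*
 * As a rough illustration with the default lbits = 9: every literal/
 * length code of nine or fewer bits is resolved by one probe of the
 * 512-entry root table (a 7-bit code simply appears in 2^(9-7) = 4 of
 * its slots), while a 12-bit code shares its first nine bits with other
 * long codes and that root entry instead points at a small second-level
 * table indexed by the remaining three bits.  huft_build() below builds
 * exactly this layered layout, trimming each sub-table to the smallest
 * size the actual code lengths allow.
 */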
3706
3707
3708/* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
3709#define BMAX 15         /* maximum bit length of any code */
3710#define N_MAX 288       /* maximum number of codes in any set */
3711
3712#ifdef DEBUG_ZLIB
3713  uInt inflate_hufts;
3714#endif
3715
3716local int huft_build(b, n, s, d, e, t, m, zs)
3717uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
3718uInt n;                 /* number of codes (assumed <= N_MAX) */
3719uInt s;                 /* number of simple-valued codes (0..s-1) */
3720uIntf *d;               /* list of base values for non-simple codes */
3721uIntf *e;               /* list of extra bits for non-simple codes */
3722inflate_huft * FAR *t;  /* result: starting table */
3723uIntf *m;               /* maximum lookup bits, returns actual */
3724z_stream *zs;           /* for zalloc function */
3725/* Given a list of code lengths and a maximum table size, make a set of
3726   tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
3727   if the given code set is incomplete (the tables are still built in this
3728   case), Z_DATA_ERROR if the input is invalid (all zero length codes or an
3729   over-subscribed set of lengths), or Z_MEM_ERROR if not enough memory. */
3730{
3731
3732  uInt a;                       /* counter for codes of length k */
3733  uInt c[BMAX+1];               /* bit length count table */
3734  uInt f;                       /* i repeats in table every f entries */
3735  int g;                        /* maximum code length */
3736  int h;                        /* table level */
3737  register uInt i;              /* counter, current code */
3738  register uInt j;              /* counter */
3739  register int k;               /* number of bits in current code */
3740  int l;                        /* bits per table (returned in m) */
3741  register uIntf *p;            /* pointer into c[], b[], or v[] */
3742  inflate_huft *q;              /* points to current table */
3743  struct inflate_huft_s r;      /* table entry for structure assignment */
3744  inflate_huft *u[BMAX];        /* table stack */
3745  uInt v[N_MAX];                /* values in order of bit length */
3746  register int w;               /* bits before this table == (l * h) */
3747  uInt x[BMAX+1];               /* bit offsets, then code stack */
3748  uIntf *xp;                    /* pointer into x */
3749  int y;                        /* number of dummy codes added */
3750  uInt z;                       /* number of entries in current table */
3751
3752
3753  /* Generate counts for each bit length */
3754  p = c;
3755#define C0 *p++ = 0;
3756#define C2 C0 C0 C0 C0
3757#define C4 C2 C2 C2 C2
3758  C4                            /* clear c[]--assume BMAX+1 is 16 */
3759  p = b;  i = n;
3760  do {
3761    c[*p++]++;                  /* assume all entries <= BMAX */
3762  } while (--i);
3763  if (c[0] == n)                /* null input--all zero length codes */
3764  {
3765    *t = (inflate_huft *)Z_NULL;
3766    *m = 0;
3767    return Z_OK;
3768  }
3769
3770
3771  /* Find minimum and maximum length, bound *m by those */
3772  l = *m;
3773  for (j = 1; j <= BMAX; j++)
3774    if (c[j])
3775      break;
3776  k = j;                        /* minimum code length */
3777  if ((uInt)l < j)
3778    l = j;
3779  for (i = BMAX; i; i--)
3780    if (c[i])
3781      break;
3782  g = i;                        /* maximum code length */
3783  if ((uInt)l > i)
3784    l = i;
3785  *m = l;
3786
3787
3788  /* Adjust last length count to fill out codes, if needed */
3789  for (y = 1 << j; j < i; j++, y <<= 1)
3790    if ((y -= c[j]) < 0)
3791      return Z_DATA_ERROR;
3792  if ((y -= c[i]) < 0)
3793    return Z_DATA_ERROR;
3794  c[i] += y;
3795
3796
3797  /* Generate starting offsets into the value table for each length */
3798  x[1] = j = 0;
3799  p = c + 1;  xp = x + 2;
3800  while (--i) {                 /* note that i == g from above */
3801    *xp++ = (j += *p++);
3802  }
3803
3804
3805  /* Make a table of values in order of bit lengths */
3806  p = b;  i = 0;
3807  do {
3808    if ((j = *p++) != 0)
3809      v[x[j]++] = i;
3810  } while (++i < n);
3811
3812
3813  /* Generate the Huffman codes and for each, make the table entries */
3814  x[0] = i = 0;                 /* first Huffman code is zero */
3815  p = v;                        /* grab values in bit order */
3816  h = -1;                       /* no tables yet--level -1 */
3817  w = -l;                       /* bits decoded == (l * h) */
3818  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
3819  q = (inflate_huft *)Z_NULL;   /* ditto */
3820  z = 0;                        /* ditto */
3821
3822  /* go through the bit lengths (k already is bits in shortest code) */
3823  for (; k <= g; k++)
3824  {
3825    a = c[k];
3826    while (a--)
3827    {
3828      /* here i is the Huffman code of length k bits for value *p */
3829      /* make tables up to required level */
3830      while (k > w + l)
3831      {
3832        h++;
3833        w += l;                 /* previous table always l bits */
3834
3835        /* compute minimum size table less than or equal to l bits */
3836        z = (z = g - w) > (uInt)l ? l : z;      /* table size upper limit */
3837        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
3838        {                       /* too few codes for k-w bit table */
3839          f -= a + 1;           /* deduct codes from patterns left */
3840          xp = c + k;
3841          if (j < z)
3842            while (++j < z)     /* try smaller tables up to z bits */
3843            {
3844              if ((f <<= 1) <= *++xp)
3845                break;          /* enough codes to use up j bits */
3846              f -= *xp;         /* else deduct codes from patterns */
3847            }
3848        }
3849        z = 1 << j;             /* table entries for j-bit table */
3850
3851        /* allocate and link in new table */
3852        if ((q = (inflate_huft *)ZALLOC
3853             (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
3854        {
3855          if (h)
3856            inflate_trees_free(u[0], zs);
3857          return Z_MEM_ERROR;   /* not enough memory */
3858        }
3859	q->word.Nalloc = z + 1;
3860#ifdef DEBUG_ZLIB
3861        inflate_hufts += z + 1;
3862#endif
3863        *t = q + 1;             /* link to list for huft_free() */
3864        *(t = &(q->next)) = Z_NULL;
3865        u[h] = ++q;             /* table starts after link */
3866
3867        /* connect to last table, if there is one */
3868        if (h)
3869        {
3870          x[h] = i;             /* save pattern for backing up */
3871          r.bits = (Byte)l;     /* bits to dump before this table */
3872          r.exop = (Byte)j;     /* bits in this table */
3873          r.next = q;           /* pointer to this table */
3874          j = i >> (w - l);     /* (get around Turbo C bug) */
3875          u[h-1][j] = r;        /* connect to last table */
3876        }
3877      }
3878
3879      /* set up table entry in r */
3880      r.bits = (Byte)(k - w);
3881      if (p >= v + n)
3882        r.exop = 128 + 64;      /* out of values--invalid code */
3883      else if (*p < s)
3884      {
3885        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);     /* 256 is end-of-block */
3886        r.base = *p++;          /* simple code is just the value */
3887      }
3888      else
3889      {
3890        r.exop = (Byte)e[*p - s] + 16 + 64; /* non-simple--look up in lists */
3891        r.base = d[*p++ - s];
3892      }
3893
3894      /* fill code-like entries with r */
3895      f = 1 << (k - w);
3896      for (j = i >> w; j < z; j += f)
3897        q[j] = r;
3898
3899      /* backwards increment the k-bit code i */
3900      for (j = 1 << (k - 1); i & j; j >>= 1)
3901        i ^= j;
3902      i ^= j;
3903
3904      /* backup over finished tables */
3905      while ((i & ((1 << w) - 1)) != x[h])
3906      {
3907        h--;                    /* don't need to update q */
3908        w -= l;
3909      }
3910    }
3911  }
3912
3913
3914  /* Return Z_BUF_ERROR if we were given an incomplete table */
3915  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
3916}
3917
3918
3919local int inflate_trees_bits(c, bb, tb, z)
3920uIntf *c;               /* 19 code lengths */
3921uIntf *bb;              /* bits tree desired/actual depth */
3922inflate_huft * FAR *tb; /* bits tree result */
3923z_stream *z;            /* for zfree function */
3924{
3925  int r;
3926
3927  r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
3928  if (r == Z_DATA_ERROR)
3929    z->msg = "oversubscribed dynamic bit lengths tree";
3930  else if (r == Z_BUF_ERROR)
3931  {
3932    inflate_trees_free(*tb, z);
3933    z->msg = "incomplete dynamic bit lengths tree";
3934    r = Z_DATA_ERROR;
3935  }
3936  return r;
3937}
3938
3939
3940local int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
3941uInt nl;                /* number of literal/length codes */
3942uInt nd;                /* number of distance codes */
3943uIntf *c;               /* that many (total) code lengths */
3944uIntf *bl;              /* literal desired/actual bit depth */
3945uIntf *bd;              /* distance desired/actual bit depth */
3946inflate_huft * FAR *tl; /* literal/length tree result */
3947inflate_huft * FAR *td; /* distance tree result */
3948z_stream *z;            /* for zfree function */
3949{
3950  int r;
3951
3952  /* build literal/length tree */
3953  if ((r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z)) != Z_OK)
3954  {
3955    if (r == Z_DATA_ERROR)
3956      z->msg = "oversubscribed literal/length tree";
3957    else if (r == Z_BUF_ERROR)
3958    {
3959      inflate_trees_free(*tl, z);
3960      z->msg = "incomplete literal/length tree";
3961      r = Z_DATA_ERROR;
3962    }
3963    return r;
3964  }
3965
3966  /* build distance tree */
3967  if ((r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z)) != Z_OK)
3968  {
3969    if (r == Z_DATA_ERROR)
3970      z->msg = "oversubscribed literal/length tree";
3971    else if (r == Z_BUF_ERROR) {
3972#ifdef PKZIP_BUG_WORKAROUND
3973      r = Z_OK;
3974    }
3975#else
3976      inflate_trees_free(*td, z);
3977      z->msg = "incomplete literal/length tree";
3978      r = Z_DATA_ERROR;
3979    }
3980    inflate_trees_free(*tl, z);
3981    return r;
3982#endif
3983  }
3984
3985  /* done */
3986  return Z_OK;
3987}
3988
3989
3990/* build fixed tables only once--keep them here */
3991local int fixed_lock = 0;
3992local int fixed_built = 0;
3993#define FIXEDH 530      /* number of hufts used by fixed tables */
3994local uInt fixed_left = FIXEDH;
3995local inflate_huft fixed_mem[FIXEDH];
3996local uInt fixed_bl;
3997local uInt fixed_bd;
3998local inflate_huft *fixed_tl;
3999local inflate_huft *fixed_td;
4000
4001
4002local voidpf falloc(q, n, s)
4003voidpf q;        /* opaque pointer (not used) */
4004uInt n;         /* number of items */
4005uInt s;         /* size of item */
4006{
4007  Assert(s == sizeof(inflate_huft) && n <= fixed_left,
4008         "inflate_trees falloc overflow");
4009  if (q) s++; /* to make some compilers happy */
4010  fixed_left -= n;
4011  return (voidpf)(fixed_mem + fixed_left);
4012}
4013
4014
4015local void ffree(q, p, n)
4016voidpf q;
4017voidpf p;
4018uInt n;
4019{
4020  Assert(0, "inflate_trees ffree called!");
4021  if (q) q = p; /* to make some compilers happy */
4022}
4023
4024
4025local int inflate_trees_fixed(bl, bd, tl, td)
4026uIntf *bl;               /* literal desired/actual bit depth */
4027uIntf *bd;               /* distance desired/actual bit depth */
4028inflate_huft * FAR *tl;  /* literal/length tree result */
4029inflate_huft * FAR *td;  /* distance tree result */
4030{
4031  /* build fixed tables if not built already--lock out other instances */
4032  while (++fixed_lock > 1)
4033    fixed_lock--;
4034  if (!fixed_built)
4035  {
4036    int k;              /* temporary variable */
4037    unsigned c[288];    /* length list for huft_build */
4038    z_stream z;         /* for falloc function */
4039
4040    /* set up fake z_stream for memory routines */
4041    z.zalloc = falloc;
4042    z.zfree = ffree;
4043    z.opaque = Z_NULL;
4044
4045    /* literal table */
4046    for (k = 0; k < 144; k++)
4047      c[k] = 8;
4048    for (; k < 256; k++)
4049      c[k] = 9;
4050    for (; k < 280; k++)
4051      c[k] = 7;
4052    for (; k < 288; k++)
4053      c[k] = 8;
4054    fixed_bl = 7;
4055    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);
4056
4057    /* distance table */
4058    for (k = 0; k < 30; k++)
4059      c[k] = 5;
4060    fixed_bd = 5;
4061    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);
4062
4063    /* done */
4064    fixed_built = 1;
4065  }
4066  fixed_lock--;
4067  *bl = fixed_bl;
4068  *bd = fixed_bd;
4069  *tl = fixed_tl;
4070  *td = fixed_td;
4071  return Z_OK;
4072}
4073
4074
4075local int inflate_trees_free(t, z)
4076inflate_huft *t;        /* table to free */
4077z_stream *z;            /* for zfree function */
4078/* Free the malloc'ed tables built by huft_build(), which makes a linked
4079   list of the tables it made, with the links in a dummy first entry of
4080   each table. */
4081{
4082  register inflate_huft *p, *q;
4083
4084  /* Go through linked list, freeing from the malloced (t[-1]) address. */
4085  p = t;
4086  while (p != Z_NULL)
4087  {
4088    q = (--p)->next;
4089    ZFREE(z, p, p->word.Nalloc * sizeof(inflate_huft));
4090    p = q;
4091  }
4092  return Z_OK;
4093}
4094
4095/*+++++*/
4096/* infcodes.c -- process literals and length/distance pairs
4097 * Copyright (C) 1995 Mark Adler
4098 * For conditions of distribution and use, see copyright notice in zlib.h
4099 */
4100
4101/* simplify the use of the inflate_huft type with some defines */
4102#define base more.Base
4103#define next more.Next
4104#define exop word.what.Exop
4105#define bits word.what.Bits
4106
4107/* inflate codes private state */
4108struct inflate_codes_state {
4109
4110  /* mode */
4111  enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4112      START,    /* x: set up for LEN */
4113      LEN,      /* i: get length/literal/eob next */
4114      LENEXT,   /* i: getting length extra (have base) */
4115      DIST,     /* i: get distance next */
4116      DISTEXT,  /* i: getting distance extra */
4117      COPY,     /* o: copying bytes in window, waiting for space */
4118      LIT,      /* o: got literal, waiting for output space */
4119      WASH,     /* o: got eob, possibly still output waiting */
4120      END,      /* x: got eob and all data flushed */
4121      BADCODE}  /* x: got error */
4122    mode;               /* current inflate_codes mode */
4123
4124  /* mode dependent information */
4125  uInt len;
4126  union {
4127    struct {
4128      inflate_huft *tree;       /* pointer into tree */
4129      uInt need;                /* bits needed */
4130    } code;             /* if LEN or DIST, where in tree */
4131    uInt lit;           /* if LIT, literal */
4132    struct {
4133      uInt get;                 /* bits to get for extra */
4134      uInt dist;                /* distance back to copy from */
4135    } copy;             /* if EXT or COPY, where and how much */
4136  } sub;                /* submode */
4137
4138  /* mode independent information */
4139  Byte lbits;           /* ltree bits decoded per branch */
4140  Byte dbits;           /* dtree bits decoded per branch */
4141  inflate_huft *ltree;          /* literal/length/eob tree */
4142  inflate_huft *dtree;          /* distance tree */
4143
4144};
4145
4146
4147local inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
4148uInt bl, bd;
4149inflate_huft *tl, *td;
4150z_stream *z;
4151{
4152  inflate_codes_statef *c;
4153
4154  if ((c = (inflate_codes_statef *)
4155       ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
4156  {
4157    c->mode = START;
4158    c->lbits = (Byte)bl;
4159    c->dbits = (Byte)bd;
4160    c->ltree = tl;
4161    c->dtree = td;
4162    Tracev((stderr, "inflate:       codes new\n"));
4163  }
4164  return c;
4165}
4166
4167
4168local int inflate_codes(s, z, r)
4169inflate_blocks_statef *s;
4170z_stream *z;
4171int r;
4172{
4173  uInt j;               /* temporary storage */
4174  inflate_huft *t;      /* temporary pointer */
4175  uInt e;               /* extra bits or operation */
4176  uLong b;              /* bit buffer */
4177  uInt k;               /* bits in bit buffer */
4178  Bytef *p;             /* input data pointer */
4179  uInt n;               /* bytes available there */
4180  Bytef *q;             /* output window write pointer */
4181  uInt m;               /* bytes to end of window or read pointer */
4182  Bytef *f;             /* pointer to copy strings from */
4183  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */
4184
4185  /* copy input/output information to locals (UPDATE macro restores) */
4186  LOAD
4187
4188  /* process input and output based on current state */
4189  while (1) switch (c->mode)
4190  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
4191    case START:         /* x: set up for LEN */
4192#ifndef SLOW
4193      if (m >= 258 && n >= 10)
4194      {
4195        UPDATE
4196        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
4197        LOAD
4198        if (r != Z_OK)
4199        {
4200          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
4201          break;
4202        }
4203      }
4204#endif /* !SLOW */
4205      c->sub.code.need = c->lbits;
4206      c->sub.code.tree = c->ltree;
4207      c->mode = LEN;
4208    case LEN:           /* i: get length/literal/eob next */
4209      j = c->sub.code.need;
4210      NEEDBITS(j)
4211      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4212      DUMPBITS(t->bits)
4213      e = (uInt)(t->exop);
4214      if (e == 0)               /* literal */
4215      {
4216        c->sub.lit = t->base;
4217        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4218                 "inflate:         literal '%c'\n" :
4219                 "inflate:         literal 0x%02x\n", t->base));
4220        c->mode = LIT;
4221        break;
4222      }
4223      if (e & 16)               /* length */
4224      {
4225        c->sub.copy.get = e & 15;
4226        c->len = t->base;
4227        c->mode = LENEXT;
4228        break;
4229      }
4230      if ((e & 64) == 0)        /* next table */
4231      {
4232        c->sub.code.need = e;
4233        c->sub.code.tree = t->next;
4234        break;
4235      }
4236      if (e & 32)               /* end of block */
4237      {
4238        Tracevv((stderr, "inflate:         end of block\n"));
4239        c->mode = WASH;
4240        break;
4241      }
4242      c->mode = BADCODE;        /* invalid code */
4243      z->msg = "invalid literal/length code";
4244      r = Z_DATA_ERROR;
4245      LEAVE
4246    case LENEXT:        /* i: getting length extra (have base) */
4247      j = c->sub.copy.get;
4248      NEEDBITS(j)
4249      c->len += (uInt)b & inflate_mask[j];
4250      DUMPBITS(j)
4251      c->sub.code.need = c->dbits;
4252      c->sub.code.tree = c->dtree;
4253      Tracevv((stderr, "inflate:         length %u\n", c->len));
4254      c->mode = DIST;
4255    case DIST:          /* i: get distance next */
4256      j = c->sub.code.need;
4257      NEEDBITS(j)
4258      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
4259      DUMPBITS(t->bits)
4260      e = (uInt)(t->exop);
4261      if (e & 16)               /* distance */
4262      {
4263        c->sub.copy.get = e & 15;
4264        c->sub.copy.dist = t->base;
4265        c->mode = DISTEXT;
4266        break;
4267      }
4268      if ((e & 64) == 0)        /* next table */
4269      {
4270        c->sub.code.need = e;
4271        c->sub.code.tree = t->next;
4272        break;
4273      }
4274      c->mode = BADCODE;        /* invalid code */
4275      z->msg = "invalid distance code";
4276      r = Z_DATA_ERROR;
4277      LEAVE
4278    case DISTEXT:       /* i: getting distance extra */
4279      j = c->sub.copy.get;
4280      NEEDBITS(j)
4281      c->sub.copy.dist += (uInt)b & inflate_mask[j];
4282      DUMPBITS(j)
4283      Tracevv((stderr, "inflate:         distance %u\n", c->sub.copy.dist));
4284      c->mode = COPY;
4285    case COPY:          /* o: copying bytes in window, waiting for space */
4286#ifndef __TURBOC__ /* Turbo C bug for following expression */
4287      f = (uInt)(q - s->window) < c->sub.copy.dist ?
4288          s->end - (c->sub.copy.dist - (q - s->window)) :
4289          q - c->sub.copy.dist;
4290#else
4291      f = q - c->sub.copy.dist;
4292      if ((uInt)(q - s->window) < c->sub.copy.dist)
4293        f = s->end - (c->sub.copy.dist - (q - s->window));
4294#endif
4295      while (c->len)
4296      {
4297        NEEDOUT
4298        OUTBYTE(*f++)
4299        if (f == s->end)
4300          f = s->window;
4301        c->len--;
4302      }
4303      c->mode = START;
4304      break;
4305    case LIT:           /* o: got literal, waiting for output space */
4306      NEEDOUT
4307      OUTBYTE(c->sub.lit)
4308      c->mode = START;
4309      break;
4310    case WASH:          /* o: got eob, possibly more output */
4311      FLUSH
4312      if (s->read != s->write)
4313        LEAVE
4314      c->mode = END;
4315    case END:
4316      r = Z_STREAM_END;
4317      LEAVE
4318    case BADCODE:       /* x: got error */
4319      r = Z_DATA_ERROR;
4320      LEAVE
4321    default:
4322      r = Z_STREAM_ERROR;
4323      LEAVE
4324  }
4325}
4326
4327
4328local void inflate_codes_free(c, z)
4329inflate_codes_statef *c;
4330z_stream *z;
4331{
4332  ZFREE(z, c, sizeof(struct inflate_codes_state));
4333  Tracev((stderr, "inflate:       codes free\n"));
4334}
4335
4336/*+++++*/
4337/* inflate_util.c -- data and routines common to blocks and codes
4338 * Copyright (C) 1995 Mark Adler
4339 * For conditions of distribution and use, see copyright notice in zlib.h
4340 */
4341
4342/* copy as much as possible from the sliding window to the output area */
4343local int inflate_flush(s, z, r)
4344inflate_blocks_statef *s;
4345z_stream *z;
4346int r;
4347{
4348  uInt n;
4349  Bytef *p, *q;
4350
4351  /* local copies of source and destination pointers */
4352  p = z->next_out;
4353  q = s->read;
4354
4355  /* compute number of bytes to copy as far as end of window */
4356  n = (uInt)((q <= s->write ? s->write : s->end) - q);
4357  if (n > z->avail_out) n = z->avail_out;
4358  if (n && r == Z_BUF_ERROR) r = Z_OK;
4359
4360  /* update counters */
4361  z->avail_out -= n;
4362  z->total_out += n;
4363
4364  /* update check information */
4365  if (s->checkfn != Z_NULL)
4366    s->check = (*s->checkfn)(s->check, q, n);
4367
4368  /* copy as far as end of window */
4369  if (p != NULL) {
4370    zmemcpy(p, q, n);
4371    p += n;
4372  }
4373  q += n;
4374
4375  /* see if more to copy at beginning of window */
4376  if (q == s->end)
4377  {
4378    /* wrap pointers */
4379    q = s->window;
4380    if (s->write == s->end)
4381      s->write = s->window;
4382
4383    /* compute bytes to copy */
4384    n = (uInt)(s->write - q);
4385    if (n > z->avail_out) n = z->avail_out;
4386    if (n && r == Z_BUF_ERROR) r = Z_OK;
4387
4388    /* update counters */
4389    z->avail_out -= n;
4390    z->total_out += n;
4391
4392    /* update check information */
4393    if (s->checkfn != Z_NULL)
4394      s->check = (*s->checkfn)(s->check, q, n);
4395
4396    /* copy */
4397    if (p != NULL) {
4398      zmemcpy(p, q, n);
4399      p += n;
4400    }
4401    q += n;
4402  }
4403
4404  /* update pointers */
4405  z->next_out = p;
4406  s->read = q;
4407
4408  /* done */
4409  return r;
4410}
4411
4412
4413/*+++++*/
4414/* inffast.c -- process literals and length/distance pairs fast
4415 * Copyright (C) 1995 Mark Adler
4416 * For conditions of distribution and use, see copyright notice in zlib.h
4417 */
4418
4419/* simplify the use of the inflate_huft type with some defines */
4420#define base more.Base
4421#define next more.Next
4422#define exop word.what.Exop
4423#define bits word.what.Bits
4424
4425/* macros for bit input with no checking and for returning unused bytes */
4426#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
4427#define UNGRAB {n+=(c=k>>3);p-=c;k&=7;}
4428
4429/* Called with number of bytes left to write in window at least 258
4430   (the maximum string length) and number of input bytes available
4431   at least ten.  The ten bytes are six bytes for the longest length/
4432   distance pair plus four bytes for overloading the bit buffer. */
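
/* The ten-byte input bound breaks down as follows: a literal/length code
   takes at most 15 code bits plus 5 length extra bits (hence GRABBITS(20)
   below), a distance code at most 15 code bits plus up to 13 distance
   extra bits, so one length/distance pair consumes at most 48 bits, i.e.
   6 bytes; the remaining four bytes allow for whole bytes that GRABBITS
   pulls into the bit buffer but does not use, which UNGRAB hands back to
   the input when the loop exits. */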
4433
4434local int inflate_fast(bl, bd, tl, td, s, z)
4435uInt bl, bd;
4436inflate_huft *tl, *td;
4437inflate_blocks_statef *s;
4438z_stream *z;
4439{
4440  inflate_huft *t;      /* temporary pointer */
4441  uInt e;               /* extra bits or operation */
4442  uLong b;              /* bit buffer */
4443  uInt k;               /* bits in bit buffer */
4444  Bytef *p;             /* input data pointer */
4445  uInt n;               /* bytes available there */
4446  Bytef *q;             /* output window write pointer */
4447  uInt m;               /* bytes to end of window or read pointer */
4448  uInt ml;              /* mask for literal/length tree */
4449  uInt md;              /* mask for distance tree */
4450  uInt c;               /* bytes to copy */
4451  uInt d;               /* distance back to copy from */
4452  Bytef *r;             /* copy source pointer */
4453
4454  /* load input, output, bit values */
4455  LOAD
4456
4457  /* initialize masks */
4458  ml = inflate_mask[bl];
4459  md = inflate_mask[bd];
4460
4461  /* do until not enough input or output space for fast loop */
4462  do {                          /* assume called with m >= 258 && n >= 10 */
4463    /* get literal/length code */
4464    GRABBITS(20)                /* max bits for literal/length code */
4465    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
4466    {
4467      DUMPBITS(t->bits)
4468      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4469                "inflate:         * literal '%c'\n" :
4470                "inflate:         * literal 0x%02x\n", t->base));
4471      *q++ = (Byte)t->base;
4472      m--;
4473      continue;
4474    }
4475    do {
4476      DUMPBITS(t->bits)
4477      if (e & 16)
4478      {
4479        /* get extra bits for length */
4480        e &= 15;
4481        c = t->base + ((uInt)b & inflate_mask[e]);
4482        DUMPBITS(e)
4483        Tracevv((stderr, "inflate:         * length %u\n", c));
4484
4485        /* decode distance base of block to copy */
4486        GRABBITS(15);           /* max bits for distance code */
4487        e = (t = td + ((uInt)b & md))->exop;
4488        do {
4489          DUMPBITS(t->bits)
4490          if (e & 16)
4491          {
4492            /* get extra bits to add to distance base */
4493            e &= 15;
4494            GRABBITS(e)         /* get extra bits (up to 13) */
4495            d = t->base + ((uInt)b & inflate_mask[e]);
4496            DUMPBITS(e)
4497            Tracevv((stderr, "inflate:         * distance %u\n", d));
4498
4499            /* do the copy */
4500            m -= c;
4501            if ((uInt)(q - s->window) >= d)     /* offset before dest */
4502            {                                   /*  just copy */
4503              r = q - d;
4504              *q++ = *r++;  c--;        /* minimum count is three, */
4505              *q++ = *r++;  c--;        /*  so unroll loop a little */
4506            }
4507            else                        /* else offset after destination */
4508            {
4509              e = d - (q - s->window);  /* bytes from offset to end */
4510              r = s->end - e;           /* pointer to offset */
4511              if (c > e)                /* if source crosses, */
4512              {
4513                c -= e;                 /* copy to end of window */
4514                do {
4515                  *q++ = *r++;
4516                } while (--e);
4517                r = s->window;          /* copy rest from start of window */
4518              }
4519            }
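            /* example: with q 1000 bytes past s->window and d == 1100,
               e is 100 and r points 100 bytes before s->end; a copy of
               c == 258 bytes takes 100 bytes up to s->end and the
               remaining 158 from s->window onward */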
4520            do {                        /* copy all or what's left */
4521              *q++ = *r++;
4522            } while (--c);
4523            break;
4524          }
4525          else if ((e & 64) == 0)
4526            e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;
4527          else
4528          {
4529            z->msg = "invalid distance code";
4530            UNGRAB
4531            UPDATE
4532            return Z_DATA_ERROR;
4533          }
4534        } while (1);
4535        break;
4536      }
4537      if ((e & 64) == 0)
4538      {
4539        if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)
4540        {
4541          DUMPBITS(t->bits)
4542          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
4543                    "inflate:         * literal '%c'\n" :
4544                    "inflate:         * literal 0x%02x\n", t->base));
4545          *q++ = (Byte)t->base;
4546          m--;
4547          break;
4548        }
4549      }
4550      else if (e & 32)
4551      {
4552        Tracevv((stderr, "inflate:         * end of block\n"));
4553        UNGRAB
4554        UPDATE
4555        return Z_STREAM_END;
4556      }
4557      else
4558      {
4559        z->msg = "invalid literal/length code";
4560        UNGRAB
4561        UPDATE
4562        return Z_DATA_ERROR;
4563      }
4564    } while (1);
4565  } while (m >= 258 && n >= 10);
4566
4567  /* not enough input or output--restore pointers and return */
4568  UNGRAB
4569  UPDATE
4570  return Z_OK;
4571}
4572
4573
4574/*+++++*/
4575/* zutil.c -- target dependent utility functions for the compression library
4576 * Copyright (C) 1995 Jean-loup Gailly.
4577 * For conditions of distribution and use, see copyright notice in zlib.h
4578 */
4579
4580/* From: zutil.c,v 1.8 1995/05/03 17:27:12 jloup Exp */
4581
4582char *zlib_version = ZLIB_VERSION;
4583
4584char *z_errmsg[] = {
4585"stream end",          /* Z_STREAM_END    1 */
4586"",                    /* Z_OK            0 */
4587"file error",          /* Z_ERRNO        (-1) */
4588"stream error",        /* Z_STREAM_ERROR (-2) */
4589"data error",          /* Z_DATA_ERROR   (-3) */
4590"insufficient memory", /* Z_MEM_ERROR    (-4) */
4591"buffer error",        /* Z_BUF_ERROR    (-5) */
4592""};
4593
4594
4595/*+++++*/
4596/* adler32.c -- compute the Adler-32 checksum of a data stream
4597 * Copyright (C) 1995 Mark Adler
4598 * For conditions of distribution and use, see copyright notice in zlib.h
4599 */
4600
4601/* From: adler32.c,v 1.6 1995/05/03 17:27:08 jloup Exp */
4602
4603#define BASE 65521L /* largest prime smaller than 65536 */
4604#define NMAX 5552
4605/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
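/* Worked check: with s1 and s2 at most BASE-1 (as they are after each
   reduction below) and n input bytes of 0xff, s2 can reach
   (n+1)(BASE-1) + 255n(n+1)/2.  For n = 5552 this is 4294690200, which
   still fits in 32 bits, while n = 5553 gives 4296171735 > 4294967295. */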
4606
4607#define DO1(buf)  {s1 += *buf++; s2 += s1;}
4608#define DO2(buf)  DO1(buf); DO1(buf);
4609#define DO4(buf)  DO2(buf); DO2(buf);
4610#define DO8(buf)  DO4(buf); DO4(buf);
4611#define DO16(buf) DO8(buf); DO8(buf);
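/* DO16 thus unrolls into sixteen DO1 updates, so the main loop in adler32()
   below consumes 16 input bytes per pass without per-byte loop overhead */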
4612
4613/* ========================================================================= */
4614uLong adler32(adler, buf, len)
4615    uLong adler;
4616    Bytef *buf;
4617    uInt len;
4618{
4619    unsigned long s1 = adler & 0xffff;
4620    unsigned long s2 = (adler >> 16) & 0xffff;
4621    int k;
4622
4623    if (buf == Z_NULL) return 1L;
4624
4625    while (len > 0) {
4626        k = len < NMAX ? len : NMAX;
4627        len -= k;
4628        while (k >= 16) {
4629            DO16(buf);
4630            k -= 16;
4631        }
4632        if (k != 0) do {
4633            DO1(buf);
4634        } while (--k);
4635        s1 %= BASE;
4636        s2 %= BASE;
4637    }
4638    return (s2 << 16) | s1;
4639}
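
/* Illustrative usage sketch -- not part of the original file; the guard
   macro ADLER32_USAGE_EXAMPLE is hypothetical and never defined.  A caller
   seeds the checksum with adler32(0L, Z_NULL, 0), which returns the initial
   value 1L per the Z_NULL check above, then folds in one or more buffers. */
#ifdef ADLER32_USAGE_EXAMPLE
local uLong example_adler32_of(buf, len)
    Bytef *buf;
    uInt len;
{
    uLong check = adler32(0L, Z_NULL, 0);   /* initial value, == 1L */
    return adler32(check, buf, len);        /* checksum of buf[0..len-1] */
}
#endif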
4640