/*
  Copyright (c) 1990-2007 Info-ZIP.  All rights reserved.

  See the accompanying file LICENSE, version 2007-Mar-04 or later
  (the contents of which are also included in unzip.h) for terms of use.
  If, for some reason, all these files are missing, the Info-ZIP license
  also may be found at:  ftp://ftp.info-zip.org/pub/infozip/license.html
*/
/* explode.c -- by Mark Adler
   version c17d, 01 December 2007 */


/* Copyright history:
   - Starting with UnZip 5.41 of 16-April-2000, this source file
     is covered by the Info-ZIP LICENSE cited above.
   - Prior versions of this source file, found in UnZip source packages
     up to UnZip 5.40, were put in the public domain.
     The original copyright note by Mark Adler was:
         "You can do whatever you like with this source file,
         though I would prefer that if you modify it and
         redistribute it that you include comments to that effect
         with your name and the date.  Thank you."

   History:
   vers    date          who           what
   ----  ---------  --------------  ------------------------------------
    c1   30 Mar 92  M. Adler        explode that uses huft_build from inflate
                                    (this gives over a 70% speed improvement
                                    over the original unimplode.c, which
                                    decoded a bit at a time)
    c2    4 Apr 92  M. Adler        fixed bug for file sizes a multiple of 32k.
    c3   10 Apr 92  M. Adler        added a little memory tracking if DEBUG
    c4   11 Apr 92  M. Adler        added NOMEMCPY to kill use of memcpy()
    c5   21 Apr 92  M. Adler        added the WSIZE #define to allow reducing
                                    the 32K window size for specialized
                                    applications.
    c6   31 May 92  M. Adler        added typecasts to eliminate some warnings
    c7   27 Jun 92  G. Roelofs      added more typecasts.
    c8   17 Oct 92  G. Roelofs      changed ULONG/UWORD/byte to ulg/ush/uch.
    c9   19 Jul 93  J. Bush         added more typecasts (to return values);
                                    made l[256] array static for Amiga.
    c10   8 Oct 93  G. Roelofs      added used_csize for diagnostics; added
                                    buf and unshrink arguments to flush();
                                    undef'd various macros at end for Turbo C;
                                    removed NEXTBYTE macro (now in unzip.h)
                                    and bytebuf variable (not used); changed
                                    memset() to memzero().
    c11   9 Jan 94  M. Adler        fixed incorrect used_csize calculation.
    c12   9 Apr 94  G. Roelofs      fixed split comments on preprocessor lines
                                    to avoid bug in Encore compiler.
    c13  25 Aug 94  M. Adler        fixed distance-length comment (orig c9 fix)
    c14  22 Nov 95  S. Maxwell      removed unnecessary "static" on auto array
    c15   6 Jul 96  W. Haidinger    added ulg typecasts to flush() calls.
    c16   8 Feb 98  C. Spieler      added ZCONST modifiers to const tables
                                    and #ifdef DEBUG around debugging code.
    c16b 25 Mar 98  C. Spieler      modified DLL code for slide redirection.
    c16d 05 Jul 99  C. Spieler      take care of flush() return values and
                                    stop processing in case of errors
    c17  04 Feb 01  C. Spieler      reorganized code to reduce repetitions
                                    of large code parts; adapted huft decoding
                                    to the changes in inflate's huft_build()
                                    due to support of deflate64; fixed memory
                                    leaks (huft tables were not free'd when
                                    get_tree() failed).
    c17b 16 Feb 02  C. Spieler      changed type of the "extra lengths" array
                                    "extra" from ush into uch (to save space)
    c17c 10 Aug 04  NN              file sizes use zoff_t.
    c17d 01 Dec 07  C. Spieler      type for file sizes changed from zoff_t
                                    into zusz_t.
 */


/*
   Explode imploded (PKZIP method 6 compressed) data.  This compression
   method searches for as much of the current string of bytes (up to a length
   of ~320) in the previous 4K or 8K bytes.  If it doesn't find any matches
   (of at least length 2 or 3), it codes the next byte.  Otherwise, it codes
   the length of the matched string and its distance backwards from the
   current position.  Single bytes ("literals") are preceded by a one (a
   single bit) and are either uncoded (the eight bits go directly into the
   compressed stream for a total of nine bits) or Huffman coded with a
   supplied literal code tree.  If literals are coded, then the minimum match
   length is three, otherwise it is two.

   There are therefore four kinds of imploded streams: 8K search with coded
   literals (min match = 3), 4K search with coded literals (min match = 3),
   8K with uncoded literals (min match = 2), and 4K with uncoded literals
   (min match = 2).  The kind of stream is identified in two bits of a
   general purpose bit flag that is outside of the compressed stream.

   Distance-length pairs for matched strings are preceded by a zero bit (to
   distinguish them from literals) and are always coded.  The distance comes
   first and is either the low six (4K) or low seven (8K) bits of the
   distance (uncoded), followed by the high six bits of the distance coded.
   Then the length is six bits coded (0..63 + min match length), and if the
   maximum such length is coded, then it's followed by another eight bits
   (uncoded) to be added to the coded length.  This gives a match length
   range of 2..320 or 3..321 bytes.
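
   As a worked example (using the distance and length tables defined below):
   in an 8K stream with coded literals, a match 1500 bytes back with length
   70 decodes from a zero bit, then seven uncoded low distance bits equal to
   91, a coded distance value of 11 (cpdist8[11] is 1409, and 1409 + 91 =
   1500), a coded length value of 63 (cplen3[63] is 66, the maximum, so
   eight extra bits follow), and finally an extra byte of 4, giving 66 + 4 =
   70.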

   The literal, length, and distance codes are all represented in a slightly
   compressed form themselves.  What is sent are the lengths of the codes for
   each value, which is sufficient to construct the codes.  Each byte of the
   code representation is the code length (the low four bits representing
   1..16), and the number of values sequentially with that length (the high
   four bits also representing 1..16).  There are 256 literal code values (if
   literals are coded), 64 length code values, and 64 distance code values,
   in that order at the beginning of the compressed stream.  Each set of code
   values is preceded (redundantly) with a byte indicating how many bytes are
   in the code description that follows, in the range 1..256.

   The codes themselves are decoded using tables made by huft_build() from
   the bit lengths.  That routine and its comments are in the inflate.c
   module.
 */
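
/* A worked example of the code-length bytes (the byte value here is invented
   for illustration):  a description byte of 0x85 has low nibble 5 and high
   nibble 8, so it assigns a 6-bit code length to each of the next nine code
   values:

        b = (c & 0x0f) + 1;                 bit length    (here 5 + 1 = 6)
        j = ((c & 0xf0) >> 4) + 1;          repeat count  (here 8 + 1 = 9)

   This is exactly the per-byte decoding done in get_tree() below. */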

#define __EXPLODE_C     /* identifies this source module */
#define UNZIP_INTERNAL
#include "unzip.h"      /* must supply slide[] (uch) array and NEXTBYTE macro */

#ifndef WSIZE
#  define WSIZE 0x8000  /* window size--must be a power of two, and */
#endif                  /* at least 8K for zip's implode method */

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
#  define wszimpl (unsigned)(G._wsize)
#else
#  if defined(USE_DEFLATE64) && defined(INT_16BIT)
#    define wszimpl (unsigned)(WSIZE>>1)
#  else /* !(USE_DEFLATE64 && INT_16BIT) */
#    define wszimpl WSIZE
#  endif /* !(USE_DEFLATE64 && INT_16BIT) */
#endif

/* routines here */
static int get_tree OF((__GPRO__ unsigned *l, unsigned n));
static int explode_lit OF((__GPRO__ struct huft *tb, struct huft *tl,
                           struct huft *td, unsigned bb, unsigned bl,
                           unsigned bd, unsigned bdl));
static int explode_nolit OF((__GPRO__ struct huft *tl, struct huft *td,
                             unsigned bl, unsigned bd, unsigned bdl));
int explode OF((__GPRO));


/* The implode algorithm uses a sliding 4K or 8K byte window on the
   uncompressed stream to find repeated byte strings.  This is implemented
   here as a circular buffer.  The index is updated simply by incrementing
   and then and'ing with 0x0fff (4K-1) or 0x1fff (8K-1).  Here, the 32K
   buffer of inflate is used, and it works just as well to always have
   a 32K circular buffer, so the index is anded with 0x7fff.  This is
   done to allow the window to also be used as the output buffer. */
/* This must be supplied in an external module useable like "uch slide[8192];"
   or "uch *slide;", where the latter would be malloc'ed.  In unzip, slide[]
   is actually a 32K area for use by inflate, which uses a 32K sliding window.
 */
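
/* For instance, with the default 32K buffer a back reference is brought into
   the circular window roughly as (an illustrative sketch of the copy loops
   below):

        d = (w - distance) & (wszimpl - 1);     wrap into 0 .. wszimpl-1

   i.e. the offset is built from w and the decoded distance and then masked
   with wszimpl-1 before each chunk of the copy. */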


#define INVALID_CODE 99
#define IS_INVALID_CODE(c)  ((c) == INVALID_CODE)

/* Tables for length and distance */
static ZCONST ush cplen2[] =
        {2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
        18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
        35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
        52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65};
static ZCONST ush cplen3[] =
        {3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
        19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
        36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
        53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66};
static ZCONST uch extra[] =
        {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        8};
static ZCONST ush cpdist4[] =
        {1, 65, 129, 193, 257, 321, 385, 449, 513, 577, 641, 705,
        769, 833, 897, 961, 1025, 1089, 1153, 1217, 1281, 1345, 1409, 1473,
        1537, 1601, 1665, 1729, 1793, 1857, 1921, 1985, 2049, 2113, 2177,
        2241, 2305, 2369, 2433, 2497, 2561, 2625, 2689, 2753, 2817, 2881,
        2945, 3009, 3073, 3137, 3201, 3265, 3329, 3393, 3457, 3521, 3585,
        3649, 3713, 3777, 3841, 3905, 3969, 4033};
static ZCONST ush cpdist8[] =
        {1, 129, 257, 385, 513, 641, 769, 897, 1025, 1153, 1281,
        1409, 1537, 1665, 1793, 1921, 2049, 2177, 2305, 2433, 2561, 2689,
        2817, 2945, 3073, 3201, 3329, 3457, 3585, 3713, 3841, 3969, 4097,
        4225, 4353, 4481, 4609, 4737, 4865, 4993, 5121, 5249, 5377, 5505,
        5633, 5761, 5889, 6017, 6145, 6273, 6401, 6529, 6657, 6785, 6913,
        7041, 7169, 7297, 7425, 7553, 7681, 7809, 7937, 8065};


/* Macros for inflate() bit peeking and grabbing.
   The usage is:

        NEEDBITS(j)
        x = b & mask_bits[j];
        DUMPBITS(j)

   where NEEDBITS makes sure that b has at least j bits in it, and
   DUMPBITS removes the bits from b.  The macros use the variable k
   for the number of bits in b.  Normally, b and k are register
   variables for speed.
 */

#define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE)<<k;k+=8;}}
#define DUMPBITS(n) {b>>=(n);k-=(n);}

#define DECODEHUFT(htab, bits, mask) {\
  NEEDBITS((unsigned)(bits))\
  t = (htab) + ((~(unsigned)b)&(mask));\
  while (1) {\
    DUMPBITS(t->b)\
    if ((e=t->e) <= 32) break;\
    if (IS_INVALID_CODE(e)) return 1;\
    e &= 31;\
    NEEDBITS(e)\
    t = t->v.t + ((~(unsigned)b)&mask_bits[e]);\
  }\
}
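
/* DECODEHUFT is used below in the same pattern as inflate's decode step,
   e.g. (an illustrative sketch):

        DECODEHUFT(tl, bl, ml)          decode one length code via table tl
        n = t->v.n;                     the decoded value
        if (e) ...                      e is the entry's extra-bits flag

   On exit, t points at the matched huft table entry and e holds its e field;
   an invalid code makes the enclosing function return 1. */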


static int get_tree(__G__ l, n)
     __GDEF
unsigned *l;            /* bit lengths */
unsigned n;             /* number expected */
/* Get the bit lengths for a code representation from the compressed
   stream.  If get_tree() returns 4, then there is an error in the data.
   Otherwise zero is returned. */
{
  unsigned i;           /* bytes remaining in list */
  unsigned k;           /* lengths entered */
  unsigned j;           /* number of codes */
  unsigned b;           /* bit length for those codes */


  /* get bit lengths */
  i = NEXTBYTE + 1;                     /* length/count pairs to read */
  k = 0;                                /* next code */
  do {
    b = ((j = NEXTBYTE) & 0xf) + 1;     /* bits in code (1..16) */
    j = ((j & 0xf0) >> 4) + 1;          /* codes with those bits (1..16) */
    if (k + j > n)
      return 4;                         /* don't overflow l[] */
    do {
      l[k++] = b;
    } while (--j);
  } while (--i);
  return k != n ? 4 : 0;                /* should have read n of them */
}
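
/* Typical use, a sketch of what explode() below does for the length tree
   (error handling abbreviated; tl must be freed by the caller):

        unsigned l[64], bl = 7;
        struct huft *tl;

        if ((r = get_tree(__G__ l, 64)) != 0)
            return (int)r;
        if ((r = huft_build(__G__ l, 64, 0, cplen3, extra, &tl, &bl)) != 0)
            return (int)r;              after huft_free(tl) if r == 1

   The l[] array filled here is consumed directly by huft_build() from
   inflate.c. */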



static int explode_lit(__G__ tb, tl, td, bb, bl, bd, bdl)
     __GDEF
struct huft *tb, *tl, *td;      /* literal, length, and distance tables */
unsigned bb, bl, bd;            /* number of bits decoded by those */
unsigned bdl;                   /* number of distance low bits */
/* Decompress the imploded data using coded literals and a sliding
   window (of size 2^(6+bdl) bytes). */
{
  zusz_t s;             /* bytes to decompress */
  register unsigned e;  /* table entry flag/number of extra bits */
  unsigned n, d;        /* length and index for copy */
  unsigned w;           /* current window position */
  struct huft *t;       /* pointer to table entry */
  unsigned mb, ml, md;  /* masks for bb, bl, and bd bits */
  unsigned mdl;         /* mask for bdl (distance lower) bits */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  unsigned u;           /* true if unflushed */
  int retval = 0;       /* error code returned: initialized to "no error" */


  /* explode the coded data */
  b = k = w = 0;                /* initialize bit buffer, window */
  u = 1;                        /* buffer unflushed */
  mb = mask_bits[bb];           /* precompute masks for speed */
  ml = mask_bits[bl];
  md = mask_bits[bd];
  mdl = mask_bits[bdl];
  s = G.lrec.ucsize;
  while (s > 0)                 /* do until ucsize bytes uncompressed */
  {
    NEEDBITS(1)
    if (b & 1)                  /* then literal--decode it */
    {
      DUMPBITS(1)
      s--;
      DECODEHUFT(tb, bb, mb)    /* get coded literal */
      redirSlide[w++] = (uch)t->v.n;
      if (w == wszimpl)
      {
        if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
          return retval;
        w = u = 0;
      }
    }
    else                        /* else distance/length */
    {
      DUMPBITS(1)
      NEEDBITS(bdl)             /* get distance low bits */
      d = (unsigned)b & mdl;
      DUMPBITS(bdl)
      DECODEHUFT(td, bd, md)    /* get coded distance high bits */
      d = w - d - t->v.n;       /* construct offset */
      DECODEHUFT(tl, bl, ml)    /* get coded length */
      n = t->v.n;
      if (e)                    /* get length extra bits */
      {
        NEEDBITS(8)
        n += (unsigned)b & 0xff;
        DUMPBITS(8)
      }

      /* do the copy */
      s = (s > (zusz_t)n ? s - (zusz_t)n : 0);
      do {
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
        if (G.redirect_slide) {
          /* &= w/ wszimpl not needed and wrong if redirect */
          if (d >= wszimpl)
            return 1;
          e = wszimpl - (d > w ? d : w);
        } else
#endif
          e = wszimpl - ((d &= wszimpl-1) > w ? d : w);
        if (e > n) e = n;
        n -= e;
        if (u && w <= d)
        {
          memzero(redirSlide + w, e);
          w += e;
          d += e;
        }
        else
#ifndef NOMEMCPY
          if (w - d >= e)       /* (this test assumes unsigned comparison) */
          {
            memcpy(redirSlide + w, redirSlide + d, e);
            w += e;
            d += e;
          }
          else                  /* do it slow to avoid memcpy() overlap */
#endif /* !NOMEMCPY */
            do {
              redirSlide[w++] = redirSlide[d++];
            } while (--e);
        if (w == wszimpl)
        {
          if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
            return retval;
          w = u = 0;
        }
      } while (n);
    }
  }

  /* flush out redirSlide */
  if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
    return retval;
  if (G.csize + G.incnt + (k >> 3))   /* should have read csize bytes, but */
  {                        /* sometimes read one too many:  k>>3 compensates */
    G.used_csize = G.lrec.csize - G.csize - G.incnt - (k >> 3);
    return 5;
  }
  return 0;
}



static int explode_nolit(__G__ tl, td, bl, bd, bdl)
     __GDEF
struct huft *tl, *td;   /* length and distance decoder tables */
unsigned bl, bd;        /* number of bits decoded by tl[] and td[] */
unsigned bdl;           /* number of distance low bits */
/* Decompress the imploded data using uncoded literals and a sliding
   window (of size 2^(6+bdl) bytes). */
{
  zusz_t s;             /* bytes to decompress */
  register unsigned e;  /* table entry flag/number of extra bits */
  unsigned n, d;        /* length and index for copy */
  unsigned w;           /* current window position */
  struct huft *t;       /* pointer to table entry */
  unsigned ml, md;      /* masks for bl and bd bits */
  unsigned mdl;         /* mask for bdl (distance lower) bits */
  register ulg b;       /* bit buffer */
  register unsigned k;  /* number of bits in bit buffer */
  unsigned u;           /* true if unflushed */
  int retval = 0;       /* error code returned: initialized to "no error" */


  /* explode the coded data */
  b = k = w = 0;                /* initialize bit buffer, window */
  u = 1;                        /* buffer unflushed */
  ml = mask_bits[bl];           /* precompute masks for speed */
  md = mask_bits[bd];
  mdl = mask_bits[bdl];
  s = G.lrec.ucsize;
  while (s > 0)                 /* do until ucsize bytes uncompressed */
  {
    NEEDBITS(1)
    if (b & 1)                  /* then literal--get eight bits */
    {
      DUMPBITS(1)
      s--;
      NEEDBITS(8)
      redirSlide[w++] = (uch)b;
      if (w == wszimpl)
      {
        if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
          return retval;
        w = u = 0;
      }
      DUMPBITS(8)
    }
    else                        /* else distance/length */
    {
      DUMPBITS(1)
      NEEDBITS(bdl)             /* get distance low bits */
      d = (unsigned)b & mdl;
      DUMPBITS(bdl)
      DECODEHUFT(td, bd, md)    /* get coded distance high bits */
      d = w - d - t->v.n;       /* construct offset */
      DECODEHUFT(tl, bl, ml)    /* get coded length */
      n = t->v.n;
      if (e)                    /* get length extra bits */
      {
        NEEDBITS(8)
        n += (unsigned)b & 0xff;
        DUMPBITS(8)
      }

      /* do the copy */
      s = (s > (zusz_t)n ? s - (zusz_t)n : 0);
      do {
#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
        if (G.redirect_slide) {
          /* &= w/ wszimpl not needed and wrong if redirect */
          if (d >= wszimpl)
            return 1;
          e = wszimpl - (d > w ? d : w);
        } else
#endif
          e = wszimpl - ((d &= wszimpl-1) > w ? d : w);
        if (e > n) e = n;
        n -= e;
        if (u && w <= d)
        {
          memzero(redirSlide + w, e);
          w += e;
          d += e;
        }
        else
#ifndef NOMEMCPY
          if (w - d >= e)       /* (this test assumes unsigned comparison) */
          {
            memcpy(redirSlide + w, redirSlide + d, e);
            w += e;
            d += e;
          }
          else                  /* do it slow to avoid memcpy() overlap */
#endif /* !NOMEMCPY */
            do {
              redirSlide[w++] = redirSlide[d++];
            } while (--e);
        if (w == wszimpl)
        {
          if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
            return retval;
          w = u = 0;
        }
      } while (n);
    }
  }

  /* flush out redirSlide */
  if ((retval = flush(__G__ redirSlide, (ulg)w, 0)) != 0)
    return retval;
  if (G.csize + G.incnt + (k >> 3))   /* should have read csize bytes, but */
  {                        /* sometimes read one too many:  k>>3 compensates */
    G.used_csize = G.lrec.csize - G.csize - G.incnt - (k >> 3);
    return 5;
  }
  return 0;
}



int explode(__G)
     __GDEF
/* Explode an imploded compressed stream.  Based on the general purpose
   bit flag, decide on coded or uncoded literals, and an 8K or 4K sliding
   window.  Construct the literal (if any), length, and distance codes and
   the tables needed to decode them (using huft_build() from inflate.c),
   and call the appropriate routine for the type of data in the remainder
   of the stream.  The four routines are nearly identical, differing only
   in whether the literal is decoded or simply read in, and in how many
   bits are read in, uncoded, for the low distance bits. */
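/* Summary of the two general purpose flag bits tested below:

        bit 2 (0x04)  set:   literals are Huffman coded, minimum match 3
                      clear: literals are stored uncoded, minimum match 2
        bit 1 (0x02)  set:   8K window, 7 uncoded low distance bits
                      clear: 4K window, 6 uncoded low distance bits
 */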
{
  unsigned r;           /* return codes */
  struct huft *tb;      /* literal code table */
  struct huft *tl;      /* length code table */
  struct huft *td;      /* distance code table */
  unsigned bb;          /* bits for tb */
  unsigned bl;          /* bits for tl */
  unsigned bd;          /* bits for td */
  unsigned bdl;         /* number of uncoded lower distance bits */
  unsigned l[256];      /* bit lengths for codes */

#if (defined(DLL) && !defined(NO_SLIDE_REDIR))
  if (G.redirect_slide)
    /* For 16-bit systems, it has already been checked at DLL entrance that
     * the buffer size in G.redirect_size does not exceed unsigned range.
     */
    G._wsize = G.redirect_size, redirSlide = G.redirect_buffer;
  else
#if defined(USE_DEFLATE64) && defined(INT_16BIT)
    /* For systems using 16-bit ints, reduce the used buffer size below
     * the limit of "unsigned int" numbers range.
     */
    G._wsize = WSIZE>>1, redirSlide = slide;
#else /* !(USE_DEFLATE64 && INT_16BIT) */
    G._wsize = WSIZE, redirSlide = slide;
#endif /* !(USE_DEFLATE64 && INT_16BIT) */
#endif /* DLL && !NO_SLIDE_REDIR */

  /* Tune base table sizes.  Note: I thought that to truly optimize speed,
     I would have to select different bl, bd, and bb values for different
     compressed file sizes.  I was surprised to find out that the values of
     7, 7, and 9 worked best over a very wide range of sizes, except that
     bd = 8 worked marginally better for large compressed sizes. */
  bl = 7;
  bd = (G.csize + G.incnt) > 200000L ? 8 : 7;

#ifdef DEBUG
  G.hufts = 0;                    /* initialize huft's malloc'ed */
#endif

  if (G.lrec.general_purpose_bit_flag & 4)
  /* With literal tree--minimum match length is 3 */
  {
    bb = 9;                     /* base table size for literals */
    if ((r = get_tree(__G__ l, 256)) != 0)
      return (int)r;
    if ((r = huft_build(__G__ l, 256, 256, NULL, NULL, &tb, &bb)) != 0)
    {
      if (r == 1)
        huft_free(tb);
      return (int)r;
    }
    if ((r = get_tree(__G__ l, 64)) != 0) {
      huft_free(tb);
      return (int)r;
    }
    if ((r = huft_build(__G__ l, 64, 0, cplen3, extra, &tl, &bl)) != 0)
    {
      if (r == 1)
        huft_free(tl);
      huft_free(tb);
      return (int)r;
    }
  }
  else
  /* No literal tree--minimum match length is 2 */
  {
    tb = (struct huft *)NULL;
    if ((r = get_tree(__G__ l, 64)) != 0)
      return (int)r;
    if ((r = huft_build(__G__ l, 64, 0, cplen2, extra, &tl, &bl)) != 0)
    {
      if (r == 1)
        huft_free(tl);
      return (int)r;
    }
  }

  if ((r = get_tree(__G__ l, 64)) != 0) {
    huft_free(tl);
    if (tb != (struct huft *)NULL) huft_free(tb);
    return (int)r;
  }
  if (G.lrec.general_purpose_bit_flag & 2)      /* true if 8K */
  {
    bdl = 7;
    r = huft_build(__G__ l, 64, 0, cpdist8, extra, &td, &bd);
  }
  else                                          /* else 4K */
  {
    bdl = 6;
    r = huft_build(__G__ l, 64, 0, cpdist4, extra, &td, &bd);
  }
  if (r != 0)
  {
    if (r == 1)
      huft_free(td);
    huft_free(tl);
    if (tb != (struct huft *)NULL) huft_free(tb);
    return (int)r;
  }

  if (tb != NULL) {
    r = explode_lit(__G__ tb, tl, td, bb, bl, bd, bdl);
    huft_free(tb);
  } else {
    r = explode_nolit(__G__ tl, td, bl, bd, bdl);
  }

  huft_free(td);
  huft_free(tl);
  Trace((stderr, "<%u > ", G.hufts));
  return (int)r;
}

/* so explode.c and inflate.c can be compiled together into one object: */
#undef DECODEHUFT
#undef NEEDBITS
#undef DUMPBITS
#undef wszimpl