/* inftrees.c -- generate Huffman trees for efficient decoding
 * Copyright (C) 1995 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

#include "zutil.h"
#include "inftrees.h"

struct internal_state {int dummy;}; /* for buggy compilers */

/* simplify the use of the inflate_huft type with some defines */
#define base more.Base
#define next more.Next
#define exop word.what.Exop
#define bits word.what.Bits


local int huft_build OF((
    uIntf *,            /* code lengths in bits */
    uInt,               /* number of codes */
    uInt,               /* number of "simple" codes */
    uIntf *,            /* list of base values for non-simple codes */
    uIntf *,            /* list of extra bits for non-simple codes */
    inflate_huft * FAR*,/* result: starting table */
    uIntf *,            /* maximum lookup bits (returns actual) */
    z_stream *));       /* for zalloc function */

local voidpf falloc OF((
    voidpf,             /* opaque pointer (not used) */
    uInt,               /* number of items */
    uInt));             /* size of item */

local void ffree OF((
    voidpf q,           /* opaque pointer (not used) */
    voidpf p));         /* what to free (not used) */

/* Tables for deflate from PKZIP's appnote.txt. */
local uInt cplens[] = { /* Copy lengths for literal codes 257..285 */
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
        /* see note #13 above about 258 */
local uInt cplext[] = { /* Extra bits for literal codes 257..285 */
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 192, 192}; /* 192==invalid */
local uInt cpdist[] = { /* Copy offsets for distance codes 0..29 */
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
        8193, 12289, 16385, 24577};
local uInt cpdext[] = { /* Extra bits for distance codes */
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
        12, 12, 13, 13};

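/* Example of how these tables are read: literal/length code 265 selects
   entry 265 - 257 = 8, so cplens[8] == 11 and cplext[8] == 1, meaning
   "base copy length 11 plus one extra bit", i.e. lengths 11..12. */
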
/*
   Huffman code decoding is performed using a multi-level table lookup.
   The fastest way to decode is to simply build a lookup table whose
   size is determined by the longest code.  However, the time it takes
   to build this table can also be a factor if the data being decoded
   is not very long.  The most common codes are necessarily the
   shortest codes, so those codes dominate the decoding time, and hence
   the speed.  The idea is you can have a shorter table that decodes the
   shorter, more probable codes, and then point to subsidiary tables for
   the longer codes.  The time it costs to decode the longer codes is
   then traded against the time it takes to make longer tables.

   The results of this trade are in the variables lbits and dbits
   below.  lbits is the number of bits the first level table for literal/
   length codes can decode in one step, and dbits is the same thing for
   the distance codes.  Subsequent tables are also less than or equal to
   those sizes.  These values may be adjusted either when all of the
   codes are shorter than that, in which case the longest code length in
   bits is used, or when the shortest code is *longer* than the requested
   table size, in which case the length of the shortest code in bits is
   used.

   There are two different values for the two tables, since they code a
   different number of possibilities each.  The literal/length table
   codes 286 possible values, or in a flat code, a little over eight
   bits.  The distance table codes 30 possible values, or a little less
   than five bits, flat.  The optimum values for speed end up being
   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
   The optimum values may differ though from machine to machine, and
   possibly even between compilers.  Your mileage may vary.
 */
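
/* As an illustration of the two-level scheme: with lbits == 9, any
   literal/length code of nine or fewer bits is decoded directly from the
   first-level table (short codes are simply replicated so that every
   9-bit index hits a valid entry).  A 12-bit code instead finds a
   first-level entry that points to a sub-table and says how many more
   input bits to use; those remaining bits then index the sub-table to
   produce the final value. */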


/* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
#define BMAX 15         /* maximum bit length of any code */
#define N_MAX 288       /* maximum number of codes in any set */

#ifdef DEBUG
  uInt inflate_hufts;
#endif

local int huft_build(b, n, s, d, e, t, m, zs)
uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
uInt n;                 /* number of codes (assumed <= N_MAX) */
uInt s;                 /* number of simple-valued codes (0..s-1) */
uIntf *d;               /* list of base values for non-simple codes */
uIntf *e;               /* list of extra bits for non-simple codes */
inflate_huft * FAR *t;  /* result: starting table */
uIntf *m;               /* maximum lookup bits, returns actual */
z_stream *zs;           /* for zalloc function */
/* Given a list of code lengths and a maximum table size, make a set of
   tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
   if the given code set is incomplete (the tables are still built in this
   case), Z_DATA_ERROR if the input is invalid (all zero length codes or an
   over-subscribed set of lengths), or Z_MEM_ERROR if not enough memory. */
{

  uInt a;                       /* counter for codes of length k */
  uInt c[BMAX+1];               /* bit length count table */
  uInt f;                       /* i repeats in table every f entries */
  int g;                        /* maximum code length */
  int h;                        /* table level */
  register uInt i;              /* counter, current code */
  register uInt j;              /* counter */
  register int k;               /* number of bits in current code */
  int l;                        /* bits per table (returned in m) */
  register uIntf *p;            /* pointer into c[], b[], or v[] */
  inflate_huft *q;              /* points to current table */
  struct inflate_huft_s r;      /* table entry for structure assignment */
  inflate_huft *u[BMAX];        /* table stack */
  uInt v[N_MAX];                /* values in order of bit length */
  register int w;               /* bits before this table == (l * h) */
  uInt x[BMAX+1];               /* bit offsets, then code stack */
  uIntf *xp;                    /* pointer into x */
  int y;                        /* number of dummy codes added */
  uInt z;                       /* number of entries in current table */


  /* Generate counts for each bit length */
  p = c;
#define C0 *p++ = 0;
#define C2 C0 C0 C0 C0
#define C4 C2 C2 C2 C2
  C4                            /* clear c[]--assume BMAX+1 is 16 */
  p = b;  i = n;
  do {
    c[*p++]++;                  /* assume all entries <= BMAX */
  } while (--i);
  if (c[0] == n)                /* null input--all zero length codes */
  {
    *t = (inflate_huft *)Z_NULL;
    *m = 0;
    return Z_OK;
  }


  /* Find minimum and maximum length, bound *m by those */
  l = *m;
  for (j = 1; j <= BMAX; j++)
    if (c[j])
      break;
  k = j;                        /* minimum code length */
  if ((uInt)l < j)
    l = j;
  for (i = BMAX; i; i--)
    if (c[i])
      break;
  g = i;                        /* maximum code length */
  if ((uInt)l > i)
    l = i;
  *m = l;


  /* Adjust last length count to fill out codes, if needed */
  for (y = 1 << j; j < i; j++, y <<= 1)
    if ((y -= c[j]) < 0)
      return Z_DATA_ERROR;
  if ((y -= c[i]) < 0)
    return Z_DATA_ERROR;
  c[i] += y;
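
  /* A small example of the check above: for code lengths {1, 2, 3, 3},
     starting from y = 2 the subtractions leave (2 - 1) * 2 = 2 after
     length 1, (2 - 1) * 2 = 2 after length 2, and 2 - 2 = 0 after
     length 3, so the code is complete.  For {1, 1, 2} they give
     (2 - 2) * 2 = 0, then 0 - 1 = -1, so the set is over-subscribed and
     Z_DATA_ERROR is returned.  If y ends up positive the code is
     incomplete, and y dummy entries are added to the longest length so
     the tables can still be built. */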

  /* Generate starting offsets into the value table for each length */
  x[1] = j = 0;
  p = c + 1;  xp = x + 2;
  while (--i) {                 /* note that i == g from above */
    *xp++ = (j += *p++);
  }
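
  /* For instance, with c[1] == 0, c[2] == 1 and c[3] == 2 this leaves
     x[1] = 0, x[2] = 0 and x[3] = 1: x[len] is the slot in v[] where the
     next value whose code is len bits long will be stored, so values
     with shorter codes end up first. */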

  /* Make a table of values in order of bit lengths */
  p = b;  i = 0;
  do {
    if ((j = *p++) != 0)
      v[x[j]++] = i;
  } while (++i < n);


  /* Generate the Huffman codes and for each, make the table entries */
  x[0] = i = 0;                 /* first Huffman code is zero */
  p = v;                        /* grab values in bit order */
  h = -1;                       /* no tables yet--level -1 */
  w = -l;                       /* bits decoded == (l * h) */
  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
  q = (inflate_huft *)Z_NULL;   /* ditto */
  z = 0;                        /* ditto */

  /* go through the bit lengths (k already is bits in shortest code) */
  for (; k <= g; k++)
  {
    a = c[k];
    while (a--)
    {
      /* here i is the Huffman code of length k bits for value *p */
      /* make tables up to required level */
      while (k > w + l)
      {
        h++;
        w += l;                 /* previous table always l bits */

        /* compute minimum size table less than or equal to l bits */
        z = (z = g - w) > (uInt)l ? l : z;      /* table size upper limit */
        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
        {                       /* too few codes for k-w bit table */
          f -= a + 1;           /* deduct codes from patterns left */
          xp = c + k;
          if (j < z)
            while (++j < z)     /* try smaller tables up to z bits */
            {
              if ((f <<= 1) <= *++xp)
                break;          /* enough codes to use up j bits */
              f -= *xp;         /* else deduct codes from patterns */
            }
        }
        z = 1 << j;             /* table entries for j-bit table */

        /* allocate and link in new table */
        if ((q = (inflate_huft *)ZALLOC
             (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)
        {
          if (h)
            inflate_trees_free(u[0], zs);
          return Z_MEM_ERROR;   /* not enough memory */
        }
#ifdef DEBUG
        inflate_hufts += z + 1;
#endif
        *t = q + 1;             /* link to list for huft_free() */
        *(t = &(q->next)) = Z_NULL;
        u[h] = ++q;             /* table starts after link */

        /* connect to last table, if there is one */
        if (h)
        {
          x[h] = i;             /* save pattern for backing up */
          r.bits = (Byte)l;     /* bits to dump before this table */
          r.exop = (Byte)j;     /* bits in this table */
          r.next = q;           /* pointer to this table */
          j = i >> (w - l);     /* (get around Turbo C bug) */
          u[h-1][j] = r;        /* connect to last table */
        }
      }

      /* set up table entry in r */
      r.bits = (Byte)(k - w);
      if (p >= v + n)
        r.exop = 128 + 64;      /* out of values--invalid code */
      else if (*p < s)
      {
        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);  /* 256 is end-of-block */
        r.base = *p++;          /* simple code is just the value */
      }
      else
      {
        r.exop = (Byte)(e[*p - s] + 16 + 64);   /* non-simple--look up in lists */
        r.base = d[*p++ - s];
      }

      /* fill code-like entries with r */
      f = 1 << (k - w);
      for (j = i >> w; j < z; j += f)
        q[j] = r;

      /* backwards increment the k-bit code i */
      for (j = 1 << (k - 1); i & j; j >>= 1)
        i ^= j;
      i ^= j;
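
      /* (The loop above is an increment on a k-bit value whose bits are
         taken in reverse order: for k == 3, i steps through 000, 100,
         010, 110, 001, 101, 011, 111.  Successive codes of the same
         length therefore get successive bit-reversed values of i, which
         is how the codes are laid out as indices in the tables here.) */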

      /* backup over finished tables */
      while ((i & ((1 << w) - 1)) != x[h])
      {
        h--;                    /* don't need to update q */
        w -= l;
      }
    }
  }


  /* Return Z_BUF_ERROR if we were given an incomplete table */
  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
}


int inflate_trees_bits(c, bb, tb, z)
uIntf *c;               /* 19 code lengths */
uIntf *bb;              /* bits tree desired/actual depth */
inflate_huft * FAR *tb; /* bits tree result */
z_stream *z;            /* for zfree function */
{
  int r;

  r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);
  if (r == Z_DATA_ERROR)
    z->msg = "oversubscribed dynamic bit lengths tree";
  else if (r == Z_BUF_ERROR)
  {
    inflate_trees_free(*tb, z);
    z->msg = "incomplete dynamic bit lengths tree";
    r = Z_DATA_ERROR;
  }
  return r;
}


int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)
uInt nl;                /* number of literal/length codes */
uInt nd;                /* number of distance codes */
uIntf *c;               /* that many (total) code lengths */
uIntf *bl;              /* literal desired/actual bit depth */
uIntf *bd;              /* distance desired/actual bit depth */
inflate_huft * FAR *tl; /* literal/length tree result */
inflate_huft * FAR *td; /* distance tree result */
z_stream *z;            /* for zfree function */
{
  int r;

  /* build literal/length tree */
  if ((r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z)) != Z_OK)
  {
    if (r == Z_DATA_ERROR)
      z->msg = "oversubscribed literal/length tree";
    else if (r == Z_BUF_ERROR)
    {
      inflate_trees_free(*tl, z);
      z->msg = "incomplete literal/length tree";
      r = Z_DATA_ERROR;
    }
    return r;
  }

  /* build distance tree */
  if ((r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z)) != Z_OK)
  {
    if (r == Z_DATA_ERROR)
      z->msg = "oversubscribed distance tree";
    else if (r == Z_BUF_ERROR) {
#ifdef PKZIP_BUG_WORKAROUND
      r = Z_OK;
    }
#else
      inflate_trees_free(*td, z);
      z->msg = "incomplete distance tree";
      r = Z_DATA_ERROR;
    }
    inflate_trees_free(*tl, z);
    return r;
#endif
  }

  /* done */
  return Z_OK;
}


/* build fixed tables only once--keep them here */
local int fixed_lock = 0;
local int fixed_built = 0;
#define FIXEDH 530      /* number of hufts used by fixed tables */
local uInt fixed_left = FIXEDH;
local inflate_huft fixed_mem[FIXEDH];
local uInt fixed_bl;
local uInt fixed_bd;
local inflate_huft *fixed_tl;
local inflate_huft *fixed_td;


local voidpf falloc(q, n, s)
voidpf q;               /* opaque pointer (not used) */
uInt n;                 /* number of items */
uInt s;                 /* size of item */
{
  Assert(s == sizeof(inflate_huft) && n <= fixed_left,
         "inflate_trees falloc overflow");
  if (q) s++;                   /* to make some compilers happy */
  fixed_left -= n;
  return (voidpf)(fixed_mem + fixed_left);
}


local void ffree(q, p)
voidpf q;
voidpf p;
{
  Assert(0, "inflate_trees ffree called!");
  if (q) q = p;                 /* to make some compilers happy */
}


int inflate_trees_fixed(bl, bd, tl, td)
uIntf *bl;              /* literal desired/actual bit depth */
uIntf *bd;              /* distance desired/actual bit depth */
inflate_huft * FAR *tl; /* literal/length tree result */
inflate_huft * FAR *td; /* distance tree result */
{
  /* build fixed tables if not built already--lock out other instances */
  while (++fixed_lock > 1)
    fixed_lock--;
  if (!fixed_built)
  {
    int k;              /* temporary variable */
    unsigned c[288];    /* length list for huft_build */
    z_stream z;         /* for falloc function */

    /* set up fake z_stream for memory routines */
    z.zalloc = falloc;
    z.zfree = ffree;
    z.opaque = Z_NULL;

    /* literal table */
    for (k = 0; k < 144; k++)
      c[k] = 8;
    for (; k < 256; k++)
      c[k] = 9;
    for (; k < 280; k++)
      c[k] = 7;
    for (; k < 288; k++)
      c[k] = 8;
    fixed_bl = 7;
    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);

    /* distance table */
    for (k = 0; k < 30; k++)
      c[k] = 5;
    fixed_bd = 5;
    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);

    /* done */
    fixed_built = 1;
  }
  fixed_lock--;
  *bl = fixed_bl;
  *bd = fixed_bd;
  *tl = fixed_tl;
  *td = fixed_td;
  return Z_OK;
}
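
/* A minimal usage sketch (the actual caller is the inflate block decoder,
   which is not part of this file):

     uInt bl, bd;
     inflate_huft *tl, *td;

     inflate_trees_fixed(&bl, &bd, &tl, &td);

   afterwards tl and td point at the fixed literal/length and distance
   tables, and bl and bd give the number of lookup bits for each. */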


int inflate_trees_free(t, z)
inflate_huft *t;        /* table to free */
z_stream *z;            /* for zfree function */
/* Free the malloc'ed tables built by huft_build(), which makes a linked
   list of the tables it made, with the links in a dummy first entry of
   each table. */
{
  register inflate_huft *p, *q;

  /* Go through linked list, freeing from the malloced (t[-1]) address. */
  p = t;
  while (p != Z_NULL)
  {
    q = (--p)->next;
    ZFREE(z,p);
    p = q;
  }
  return Z_OK;
}