/* +++ trees.c */
/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-1996 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in a compressed form which is itself
 *      a Huffman encoding of the lengths of all the code strings (in
 *      ascending order by source values).  The actual code strings are
 *      reconstructed from the lengths in the inflate process, as described
 *      in the deflate specification.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
 *      Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
 *
 *      Storer, James A.
 *          Data Compression:  Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988.  ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 */

/* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */

/* #include "deflate.h" */

#include <linux/zutil.h>
#include "defutil.h"

#ifdef DEBUG_ZLIB
#  include <ctype.h>
#endif

/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times  (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times  (7 bits of repeat count) */

static const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

static const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

static const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

static const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

#define Buf_size (8 * 2*sizeof(char))
/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)
 */

/* ===========================================================================
 * Local data. These are initialized only once.
 */

static ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However,
 * the codes 286 and 287 are needed to build a canonical tree (see
 * zlib_tr_init below).
 */

static ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

static uch dist_code[512];
/* Distance codes, indexed by distance - 1. The first 256 values correspond
 * to the distances 1 .. 256, the last 256 values correspond to the top
 * 8 bits of the 15 bit distances.
 */

static uch length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

static int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

static int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */

struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const int *extra_bits;       /* extra bits for each code or NULL */
    int     extra_base;          /* base index for extra_bits */
    int     elems;               /* max number of elements in the tree */
    int     max_length;          /* max bit length for the codes */
};

static static_tree_desc  static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

static static_tree_desc  static_d_desc =
{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};

static static_tree_desc  static_bl_desc =
{(const ct_data *)0, extra_blbits, 0,   BL_CODES, MAX_BL_BITS};

/* ===========================================================================
 * Local (static) routines in this file.
 */

static void tr_static_init (void);
static void init_block     (deflate_state *s);
static void pqdownheap     (deflate_state *s, ct_data *tree, int k);
static void gen_bitlen     (deflate_state *s, tree_desc *desc);
static void gen_codes      (ct_data *tree, int max_code, ush *bl_count);
static void build_tree     (deflate_state *s, tree_desc *desc);
static void scan_tree      (deflate_state *s, ct_data *tree, int max_code);
static void send_tree      (deflate_state *s, ct_data *tree, int max_code);
static int  build_bl_tree  (deflate_state *s);
static void send_all_trees (deflate_state *s, int lcodes, int dcodes,
                            int blcodes);
static void compress_block (deflate_state *s, ct_data *ltree,
                            ct_data *dtree);
static void set_data_type  (deflate_state *s);
static unsigned bi_reverse (unsigned value, int length);
static void bi_windup      (deflate_state *s);
static void bi_flush       (deflate_state *s);
static void copy_block     (deflate_state *s, char *buf, unsigned len,
                            int header);

#ifndef DEBUG_ZLIB
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
   /* Send a code of the given tree. c and tree must not have side effects */

#else /* DEBUG_ZLIB */
#  define send_code(s, c, tree) \
     { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
       send_bits(s, tree[c].Code, tree[c].Len); }
#endif

#define d_code(dist) \
   ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 */

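/* For example, for an actual match distance of 5 the caller passes
 * dist = 4, which falls in the first half of the table: d_code(4) ==
 * dist_code[4] == 4, a code with one extra distance bit. For an actual
 * distance of 300 the caller passes dist = 299 >= 256, so the code is
 * taken from dist_code[256 + (299 >> 7)] == dist_code[258] == 16, the
 * code covering distances 257..384.
 */
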
/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef DEBUG_ZLIB
static void send_bits      (deflate_state *s, int value, int length);

static void send_bits(
    deflate_state *s,
    int value,  /* value to send */
    int length  /* number of bits */
)
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If there is not enough room in bi_buf, use the bi_valid bits already
     * in bi_buf plus (Buf_size - bi_valid) bits from value, leaving
     * (length - (Buf_size - bi_valid)) unused bits in value for the next
     * flush.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (value << s->bi_valid);
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !DEBUG_ZLIB */

#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = value;\
    s->bi_buf |= (val << s->bi_valid);\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* DEBUG_ZLIB */

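/* For example, with Buf_size == 16, bi_valid == 13 and a 5-bit value to
 * send: 16 - 13 = 3 low bits of the value top off bi_buf, which is flushed
 * with put_short(); the remaining 5 - 3 = 2 high bits of the value are left
 * in bi_buf and bi_valid becomes 13 + 5 - 16 = 2.
 */
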
/* ===========================================================================
 * Initialize the various 'constant' tables. In a multi-threaded environment,
 * this function may be called by two threads concurrently, but this is
 * harmless since both invocations do exactly the same thing.
 */
static void tr_static_init(void)
{
    static int static_init_done;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;
}

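/* For example, length code 8 has one extra bit (extra_lbits[8] == 1), so it
 * covers the two normalized lengths 8 and 9 (match lengths 11 and 12):
 * base_length[8] == 8 and length_code[8] == length_code[9] == 8. Likewise
 * the static literal tree built above assigns 8-bit codes to literals
 * 0..143, 9-bit codes to 144..255, 7-bit codes to 256..279 and 8-bit codes
 * to 280..287, exactly as required by the deflate specification.
 */
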
/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
void zlib_tr_init(
    deflate_state *s
)
{
    tr_static_init();

    s->compressed_len = 0L;

    s->l_desc.dyn_tree = s->dyn_ltree;
    s->l_desc.stat_desc = &static_l_desc;

    s->d_desc.dyn_tree = s->dyn_dtree;
    s->d_desc.stat_desc = &static_d_desc;

    s->bl_desc.dyn_tree = s->bl_tree;
    s->bl_desc.stat_desc = &static_bl_desc;

    s->bi_buf = 0;
    s->bi_valid = 0;
    s->last_eob_len = 8; /* enough lookahead for inflate */
#ifdef DEBUG_ZLIB
    s->bits_sent = 0L;
#endif

    /* Initialize the first block of the first file: */
    init_block(s);
}

/* ===========================================================================
 * Initialize a new block.
 */
static void init_block(
    deflate_state *s
)
{
    int n; /* iterates over tree elements */

    /* Initialize the trees. */
    for (n = 0; n < L_CODES;  n++) s->dyn_ltree[n].Freq = 0;
    for (n = 0; n < D_CODES;  n++) s->dyn_dtree[n].Freq = 0;
    for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;

    s->dyn_ltree[END_BLOCK].Freq = 1;
    s->opt_len = s->static_len = 0L;
    s->last_lit = s->matches = 0;
}

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}

/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */
static void pqdownheap(
    deflate_state *s,
    ct_data *tree,  /* the tree to restore */
    int k           /* node to move down */
)
{
    int v = s->heap[k];
    int j = k << 1;  /* left son of k */
    while (j <= s->heap_len) {
        /* Set j to the smallest of the two sons: */
        if (j < s->heap_len &&
            smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
            j++;
        }
        /* Exit if v is smaller than both sons */
        if (smaller(tree, v, s->heap[j], s->depth)) break;

        /* Exchange v with the smallest son */
        s->heap[k] = s->heap[j];  k = j;

        /* And continue down the tree, setting j to the left son of k */
        j <<= 1;
    }
    s->heap[k] = v;
}

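/* For example, if heap[1..3] hold nodes with frequencies 7, 3 and 5 and
 * heap_len == 3, pqdownheap(s, tree, SMALLEST) picks the smaller son
 * (frequency 3 at index 2), moves it up into heap[1], and places the old
 * root (frequency 7) at index 2, where it has no sons; the heap property
 * (each father no larger than its sons) is then restored.
 */
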
/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
static void gen_bitlen(
    deflate_state *s,
    tree_desc *desc    /* the tree descriptor */
)
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const int *extra     = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}

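/* For example, with max_length == 7 (the bit length tree) and one code
 * whose optimal length would be 8, the first pass truncates it to 7 and
 * sets overflow to 1. The repair loop then finds the largest bit length
 * b < 7 that is in use, moves one leaf from length b down to length b+1
 * and pairs it there with one of the length-7 codes (bl_count[b]--,
 * bl_count[b+1] += 2, bl_count[7]--), so the counts once again describe a
 * valid prefix code no longer than 7 bits.
 */
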
/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
static void gen_codes(
    ct_data *tree,             /* the tree to decorate */
    int max_code,              /* largest code with non zero frequency */
    ush *bl_count              /* number of codes at each bit length */
)
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    ush code = 0;              /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        next_code[bits] = code = (code + bl_count[bits-1]) << 1;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0;  n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits */
        tree[n].Code = bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}

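/* For example, with MAX_BITS taken as 3 and bl_count[] = {0, 1, 1, 2}
 * (one 1-bit code, one 2-bit code and two 3-bit codes), the loop above
 * produces next_code[1] = 0, next_code[2] = 2 and next_code[3] = 6, so the
 * symbols receive the codes 0, 10, 110 and 111 in order of increasing
 * symbol value; the last code is all ones, which is exactly what the
 * consistency Assert checks. bi_reverse() then stores each code so that it
 * is emitted least significant bit first, as deflate requires.
 */
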
/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
static void build_tree(
    deflate_state *s,
    tree_desc *desc  /* the tree descriptor */
)
{
    ct_data *tree         = desc->dyn_tree;
    const ct_data *stree  = desc->stat_desc->static_tree;
    int elems             = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch) (max(s->depth[n], s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}

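/* For example, with three active codes of frequencies 5, 2 and 1, the loop
 * above first combines the two least frequent leaves (2 and 1) into an
 * internal node of frequency 3, then combines that node with the remaining
 * leaf (5) into the root of frequency 8; gen_bitlen() then assigns length 1
 * to the frequency-5 code and length 2 to each of the other two.
 */
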
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
static void scan_tree(
    deflate_state *s,
    ct_data *tree,   /* the tree to be scanned */
    int max_code     /* and its largest code of non zero frequency */
)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;
        } else {
            s->bl_tree[REPZ_11_138].Freq++;
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

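/* For example, a tree whose code lengths are 4,4,4,4,4,0,0,0 is counted as
 * one literal length 4 (bl_tree[4].Freq++) plus one REP_3_6 covering the
 * remaining four copies (bl_tree[REP_3_6].Freq++), followed by one
 * REPZ_3_10 for the three trailing zero lengths (bl_tree[REPZ_3_10].Freq++);
 * send_tree() below emits exactly the same sequence of codes.
 */
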
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
 */
static void send_tree(
    deflate_state *s,
    ct_data *tree, /* the tree to be scanned */
    int max_code   /* and its largest code of non zero frequency */
)
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */  /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
static int build_bl_tree(
    deflate_state *s
)
{
    int max_blindex;  /* index of last bit length code of non zero freq */

    /* Determine the bit length frequencies for literal and distance trees */
    scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
    scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);

    /* Build the bit length tree: */
    build_tree(s, (tree_desc *)(&(s->bl_desc)));
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
        if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    s->opt_len += 3*(max_blindex+1) + 5+5+4;
    Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
            s->opt_len, s->static_len));

    return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
static void send_all_trees(
    deflate_state *s,
    int lcodes,  /* number of literal/length codes */
    int dcodes,  /* number of distance codes */
    int blcodes  /* number of bit length codes */
)
{
    int rank;                    /* index in bl_order */

    Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
            "too many codes");
    Tracev((stderr, "\nbl counts: "));
    send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
    send_bits(s, dcodes-1,   5);
    send_bits(s, blcodes-4,  4); /* not -3 as stated in appnote.txt */
    for (rank = 0; rank < blcodes; rank++) {
        Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
        send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
    }
    Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
    Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));

    send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
    Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
}

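/* For example, a block using 260 literal/length codes, 10 distance codes
 * and 15 bit length codes sends the three counts as 260-257 = 3 in 5 bits,
 * 10-1 = 9 in 5 bits and 15-4 = 11 in 4 bits, followed by the 3-bit lengths
 * of the bit length codes in bl_order[] order, and finally the two trees
 * themselves in run-length-encoded form.
 */
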
/* ===========================================================================
 * Send a stored block
 */
void zlib_tr_stored_block(
    deflate_state *s,
    char *buf,        /* input block */
    ulg stored_len,   /* length of input block */
    int eof           /* true if this is the last block for a file */
)
{
    send_bits(s, (STORED_BLOCK<<1)+eof, 3);  /* send block type */
    s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
    s->compressed_len += (stored_len + 4) << 3;

    copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
}

/* Send just the `stored block' type code without any length bytes or data.
 */
void zlib_tr_stored_type_only(
    deflate_state *s
)
{
    send_bits(s, (STORED_BLOCK << 1), 3);
    bi_windup(s);
    s->compressed_len = (s->compressed_len + 3) & ~7L;
}

/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 * The current inflate code requires 9 bits of lookahead. If the
 * last two codes for the previous block (real code plus EOB) were coded
 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
 * the last real code. In this case we send two empty static blocks instead
 * of one. (There are no problems if the previous block is stored or fixed.)
 * To simplify the code, we assume the worst case of last real code encoded
 * on one bit only.
 */
void zlib_tr_align(
    deflate_state *s
)
{
    send_bits(s, STATIC_TREES<<1, 3);
    send_code(s, END_BLOCK, static_ltree);
    s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
    bi_flush(s);
    /* Of the 10 bits for the empty block, we have already sent
     * (10 - bi_valid) bits. The lookahead for the last real code (before
     * the EOB of the previous block) was thus at least one plus the length
     * of the EOB plus what we have just sent of the empty static block.
     */
    if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
        send_bits(s, STATIC_TREES<<1, 3);
        send_code(s, END_BLOCK, static_ltree);
        s->compressed_len += 10L;
        bi_flush(s);
    }
    s->last_eob_len = 7;
}

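/* For example, if the previous (dynamic) block ended with a 3-bit EOB code
 * and bi_valid is 7 after the flush above, the guaranteed lookahead is
 * 1 + 3 + 10 - 7 = 7 bits, less than the 9 bits inflate may need, so a
 * second empty static block is emitted; with the usual 7-bit static EOB the
 * sum is at least 1 + 7 + 10 - 7 = 11 and one empty block is enough.
 */
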
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file. This function
 * returns the total compressed length for the file so far.
 */
ulg zlib_tr_flush_block(
    deflate_state *s,
    char *buf,        /* input block, or NULL if too old */
    ulg stored_len,   /* length of input block */
    int eof           /* true if this is the last block for a file */
)
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0;  /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is ascii or binary */
        if (s->data_type == Z_UNKNOWN) set_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute first the block length in bytes*/
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

    /* If compression failed and this is the first and last block,
     * and if the .zip file can be seeked (to rewrite the local header),
     * the whole file is transformed into a stored file:
     */
#ifdef STORED_FILE_OK
#  ifdef FORCE_STORED_FILE
    if (eof && s->compressed_len == 0L) { /* force stored file */
#  else
    if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable()) {
#  endif
        /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
        if (buf == (char*)0) error ("block vanished");

        copy_block(s, buf, (unsigned)stored_len, 0); /* without header */
        s->compressed_len = stored_len << 3;
        s->method = STORED;
    } else
#endif /* STORED_FILE_OK */

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        zlib_tr_stored_block(s, buf, stored_len, eof);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+eof, 3);
        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
        s->compressed_len += 3 + s->static_len;
    } else {
        send_bits(s, (DYN_TREES<<1)+eof, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
        s->compressed_len += 3 + s->opt_len;
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    init_block(s);

    if (eof) {
        bi_windup(s);
        s->compressed_len += 7;  /* align on byte boundary */
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
           s->compressed_len-7*eof));

    return s->compressed_len >> 3;
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int zlib_tr_tally(
    deflate_state *s,
    unsigned dist,  /* distance of matched string */
    unsigned lc     /* match length-MIN_MATCH or unmatched char (if dist==0) */
)
{
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--;             /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES,  "zlib_tr_tally: bad match");

        s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0xfff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                          (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
               s->last_lit, in_length, out_length,
               100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}

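/* For example, at a compression level above 2, after 4096 tallied
 * literals/matches (so last_lit & 0xfff == 0) with only 1000 matches and an
 * estimated upper bound of roughly 2000 output bytes against 8000 input
 * bytes, both matches < last_lit/2 and out_length < in_length/2 hold, so
 * the caller is told to flush the block early; otherwise deflate keeps
 * accumulating until the literal buffer is nearly full.
 */
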
/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
static void compress_block(
    deflate_state *s,
    ct_data *ltree, /* literal tree */
    ct_data *dtree  /* distance tree */
)
{
    unsigned dist;      /* distance of matched string */
    int lc;             /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0;    /* running index in l_buf */
    unsigned code;      /* the code to send */
    int extra;          /* number of extra bits to send */

    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);       /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);       /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= base_dist[code];
                send_bits(s, dist, extra);   /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
    s->last_eob_len = ltree[END_BLOCK].Len;
}

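/* For example, a match of length 11 at distance 5 was tallied with
 * lc = 11 - MIN_MATCH = 8 and dist = 5. Here length_code[8] == 8, so the
 * symbol 8+LITERALS+1 == 265 is sent from the literal tree followed by
 * lc - base_length[8] = 0 in one extra bit; then dist becomes 4,
 * d_code(4) == 4 is sent from the distance tree followed by
 * 4 - base_dist[4] = 0 in one extra bit.
 */
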
/* ===========================================================================
 * Set the data type to ASCII or BINARY, using a crude approximation:
 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
 * IN assertion: the fields freq of dyn_ltree are set and the total of all
 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
 */
static void set_data_type(
    deflate_state *s
)
{
    int n = 0;
    unsigned ascii_freq = 0;
    unsigned bin_freq = 0;
    while (n < 7)        bin_freq += s->dyn_ltree[n++].Freq;
    while (n < 128)    ascii_freq += s->dyn_ltree[n++].Freq;
    while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
    s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
}

/* ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
 */
static void copy_block(
    deflate_state *s,
    char    *buf,     /* the input data */
    unsigned len,     /* its length */
    int      header   /* true if block header must be written */
)
{
    bi_windup(s);        /* align on byte boundary */
    s->last_eob_len = 8; /* enough lookahead for inflate */

    if (header) {
        put_short(s, (ush)len);
        put_short(s, (ush)~len);
#ifdef DEBUG_ZLIB
        s->bits_sent += 2*16;
#endif
    }
#ifdef DEBUG_ZLIB
    s->bits_sent += (ulg)len<<3;
#endif
    /* bundle up the put_byte(s, *buf++) calls */
    memcpy(&s->pending_buf[s->pending], buf, len);
    s->pending += len;
}