| author | Mark Adler <fork@madler.net> | 2022-10-01 17:04:06 -0700 |
|---|---|---|
| committer | Mark Adler <fork@madler.net> | 2022-10-05 15:17:52 -0700 |
| commit | 84c6716a48743edfb71053ba07755e0cf7ba638d | |
| tree | 5e02319722d8682bd51f7de01f38b21bb13756c8 | |
| parent | 3e4aa45834c2e76c1f21f9c463c2f356f3bb512c | |
Minor formatting improvements.
No code changes.
| -rw-r--r-- | compress.c | 6 |
| -rw-r--r-- | deflate.c | 137 |
| -rw-r--r-- | gzlib.c | 2 |
| -rw-r--r-- | gzwrite.c | 2 |
| -rw-r--r-- | trees.c | 104 |
| -rw-r--r-- | uncompr.c | 4 |
| -rw-r--r-- | zlib.h | 2 |
| -rw-r--r-- | zutil.c | 14 |
8 files changed, 138 insertions, 133 deletions
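The diff is whitespace and style cleanup throughout: a space on each side of binary operators and shifts, no space between a function name and its old-style (K&R) parameter list, and a few long Assert() calls and comments re-wrapped. For orientation, two representative lines quoted from the hunks below:

```c
/* before */
int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)

/* after */
int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
#define UPDATE_HASH(s,h,c) (h = (((h) << s->hash_shift) ^ (c)) & s->hash_mask)
```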
| diff --git a/compress.c b/compress.c | |||
| @@ -19,7 +19,7 @@ | |||
| 19 | memory, Z_BUF_ERROR if there was not enough room in the output buffer, | 19 | memory, Z_BUF_ERROR if there was not enough room in the output buffer, |
| 20 | Z_STREAM_ERROR if the level parameter is invalid. | 20 | Z_STREAM_ERROR if the level parameter is invalid. |
| 21 | */ | 21 | */ |
| 22 | int ZEXPORT compress2 (dest, destLen, source, sourceLen, level) | 22 | int ZEXPORT compress2(dest, destLen, source, sourceLen, level) |
| 23 | Bytef *dest; | 23 | Bytef *dest; |
| 24 | uLongf *destLen; | 24 | uLongf *destLen; |
| 25 | const Bytef *source; | 25 | const Bytef *source; |
| @@ -65,7 +65,7 @@ int ZEXPORT compress2 (dest, destLen, source, sourceLen, level) | |||
| 65 | 65 | ||
| 66 | /* =========================================================================== | 66 | /* =========================================================================== |
| 67 | */ | 67 | */ |
| 68 | int ZEXPORT compress (dest, destLen, source, sourceLen) | 68 | int ZEXPORT compress(dest, destLen, source, sourceLen) |
| 69 | Bytef *dest; | 69 | Bytef *dest; |
| 70 | uLongf *destLen; | 70 | uLongf *destLen; |
| 71 | const Bytef *source; | 71 | const Bytef *source; |
| @@ -78,7 +78,7 @@ int ZEXPORT compress (dest, destLen, source, sourceLen) | |||
| 78 | If the default memLevel or windowBits for deflateInit() is changed, then | 78 | If the default memLevel or windowBits for deflateInit() is changed, then |
| 79 | this function needs to be updated. | 79 | this function needs to be updated. |
| 80 | */ | 80 | */ |
| 81 | uLong ZEXPORT compressBound (sourceLen) | 81 | uLong ZEXPORT compressBound(sourceLen) |
| 82 | uLong sourceLen; | 82 | uLong sourceLen; |
| 83 | { | 83 | { |
| 84 | return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) + | 84 | return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) + |
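As a brief aside (not part of this commit): compressBound() exists so callers can size the destination buffer for compress()/compress2() up front. A minimal usage sketch, assuming only the standard zlib API; the helper name compress_to_heap is made up for illustration:

```c
#include <stdlib.h>
#include "zlib.h"

/* Hypothetical helper: compress src[0..srcLen-1] into a freshly allocated
 * buffer. Sizing the buffer with compressBound() guarantees compress2()
 * cannot run out of output space. */
int compress_to_heap(const Bytef *src, uLong srcLen,
                     Bytef **out, uLongf *outLen) {
    uLongf bound = compressBound(srcLen);   /* worst-case compressed size */
    Bytef *buf = (Bytef *)malloc(bound);
    if (buf == NULL) return Z_MEM_ERROR;
    int ret = compress2(buf, &bound, src, srcLen, Z_DEFAULT_COMPRESSION);
    if (ret != Z_OK) {
        free(buf);
        return ret;
    }
    *out = buf;
    *outLen = bound;                        /* actual compressed size */
    return Z_OK;
}
```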
| diff --git a/deflate.c b/deflate.c | |||
| @@ -160,7 +160,7 @@ local const config configuration_table[10] = { | |||
| 160 | * characters, so that a running hash key can be computed from the previous | 160 | * characters, so that a running hash key can be computed from the previous |
| 161 | * key instead of complete recalculation each time. | 161 | * key instead of complete recalculation each time. |
| 162 | */ | 162 | */ |
| 163 | #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask) | 163 | #define UPDATE_HASH(s,h,c) (h = (((h) << s->hash_shift) ^ (c)) & s->hash_mask) |
| 164 | 164 | ||
| 165 | 165 | ||
| 166 | /* =========================================================================== | 166 | /* =========================================================================== |
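A quick note on why the running hash in UPDATE_HASH works: with the default memLevel of 8, hash_bits is 15 (memLevel + 7, as set in the deflateInit2_ hunk below) and hash_shift is (15 + 3 - 1)/3 = 5, so after MIN_MATCH = 3 consecutive updates the oldest byte has been shifted left by 15 bits and is cleared entirely by hash_mask.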
| @@ -191,9 +191,9 @@ local const config configuration_table[10] = { | |||
| 191 | */ | 191 | */ |
| 192 | #define CLEAR_HASH(s) \ | 192 | #define CLEAR_HASH(s) \ |
| 193 | do { \ | 193 | do { \ |
| 194 | s->head[s->hash_size-1] = NIL; \ | 194 | s->head[s->hash_size - 1] = NIL; \ |
| 195 | zmemzero((Bytef *)s->head, \ | 195 | zmemzero((Bytef *)s->head, \ |
| 196 | (unsigned)(s->hash_size-1)*sizeof(*s->head)); \ | 196 | (unsigned)(s->hash_size - 1)*sizeof(*s->head)); \ |
| 197 | } while (0) | 197 | } while (0) |
| 198 | 198 | ||
| 199 | /* =========================================================================== | 199 | /* =========================================================================== |
| @@ -314,7 +314,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy, | |||
| 314 | s->hash_bits = (uInt)memLevel + 7; | 314 | s->hash_bits = (uInt)memLevel + 7; |
| 315 | s->hash_size = 1 << s->hash_bits; | 315 | s->hash_size = 1 << s->hash_bits; |
| 316 | s->hash_mask = s->hash_size - 1; | 316 | s->hash_mask = s->hash_size - 1; |
| 317 | s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH); | 317 | s->hash_shift = ((s->hash_bits + MIN_MATCH-1) / MIN_MATCH); |
| 318 | 318 | ||
| 319 | s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte)); | 319 | s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte)); |
| 320 | s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos)); | 320 | s->prev = (Posf *) ZALLOC(strm, s->w_size, sizeof(Pos)); |
| @@ -340,11 +340,11 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy, | |||
| 340 | * sym_buf value to read moves forward three bytes. From that symbol, up to | 340 | * sym_buf value to read moves forward three bytes. From that symbol, up to |
| 341 | * 31 bits are written to pending_buf. The closest the written pending_buf | 341 | * 31 bits are written to pending_buf. The closest the written pending_buf |
| 342 | * bits gets to the next sym_buf symbol to read is just before the last | 342 | * bits gets to the next sym_buf symbol to read is just before the last |
| 343 | * code is written. At that time, 31*(n-2) bits have been written, just | 343 | * code is written. At that time, 31*(n - 2) bits have been written, just |
| 344 | * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at | 344 | * after 24*(n - 2) bits have been consumed from sym_buf. sym_buf starts at |
| 345 | * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1 | 345 | * 8*n bits into pending_buf. (Note that the symbol buffer fills when n - 1 |
| 346 | * symbols are written.) The closest the writing gets to what is unread is | 346 | * symbols are written.) The closest the writing gets to what is unread is |
| 347 | * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and | 347 | * then n + 14 bits. Here n is lit_bufsize, which is 16384 by default, and |
| 348 | * can range from 128 to 32768. | 348 | * can range from 128 to 32768. |
| 349 | * | 349 | * |
| 350 | * Therefore, at a minimum, there are 142 bits of space between what is | 350 | * Therefore, at a minimum, there are 142 bits of space between what is |
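Checking the arithmetic in that comment: the unread symbol sits at 8n + 24(n - 2) bits into pending_buf while 31(n - 2) bits have been written, so the margin is 8n + 24(n - 2) - 31(n - 2) = n + 14 bits, which is 142 bits at the minimum lit_bufsize of n = 128 — the figure quoted above.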
| @@ -390,7 +390,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy, | |||
| 390 | /* ========================================================================= | 390 | /* ========================================================================= |
| 391 | * Check for a valid deflate stream state. Return 0 if ok, 1 if not. | 391 | * Check for a valid deflate stream state. Return 0 if ok, 1 if not. |
| 392 | */ | 392 | */ |
| 393 | local int deflateStateCheck (strm) | 393 | local int deflateStateCheck(strm) |
| 394 | z_streamp strm; | 394 | z_streamp strm; |
| 395 | { | 395 | { |
| 396 | deflate_state *s; | 396 | deflate_state *s; |
| @@ -413,7 +413,7 @@ local int deflateStateCheck (strm) | |||
| 413 | } | 413 | } |
| 414 | 414 | ||
| 415 | /* ========================================================================= */ | 415 | /* ========================================================================= */ |
| 416 | int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength) | 416 | int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength) |
| 417 | z_streamp strm; | 417 | z_streamp strm; |
| 418 | const Bytef *dictionary; | 418 | const Bytef *dictionary; |
| 419 | uInt dictLength; | 419 | uInt dictLength; |
| @@ -482,7 +482,7 @@ int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength) | |||
| 482 | } | 482 | } |
| 483 | 483 | ||
| 484 | /* ========================================================================= */ | 484 | /* ========================================================================= */ |
| 485 | int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength) | 485 | int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength) |
| 486 | z_streamp strm; | 486 | z_streamp strm; |
| 487 | Bytef *dictionary; | 487 | Bytef *dictionary; |
| 488 | uInt *dictLength; | 488 | uInt *dictLength; |
| @@ -504,7 +504,7 @@ int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength) | |||
| 504 | } | 504 | } |
| 505 | 505 | ||
| 506 | /* ========================================================================= */ | 506 | /* ========================================================================= */ |
| 507 | int ZEXPORT deflateResetKeep (strm) | 507 | int ZEXPORT deflateResetKeep(strm) |
| 508 | z_streamp strm; | 508 | z_streamp strm; |
| 509 | { | 509 | { |
| 510 | deflate_state *s; | 510 | deflate_state *s; |
| @@ -542,7 +542,7 @@ int ZEXPORT deflateResetKeep (strm) | |||
| 542 | } | 542 | } |
| 543 | 543 | ||
| 544 | /* ========================================================================= */ | 544 | /* ========================================================================= */ |
| 545 | int ZEXPORT deflateReset (strm) | 545 | int ZEXPORT deflateReset(strm) |
| 546 | z_streamp strm; | 546 | z_streamp strm; |
| 547 | { | 547 | { |
| 548 | int ret; | 548 | int ret; |
| @@ -554,7 +554,7 @@ int ZEXPORT deflateReset (strm) | |||
| 554 | } | 554 | } |
| 555 | 555 | ||
| 556 | /* ========================================================================= */ | 556 | /* ========================================================================= */ |
| 557 | int ZEXPORT deflateSetHeader (strm, head) | 557 | int ZEXPORT deflateSetHeader(strm, head) |
| 558 | z_streamp strm; | 558 | z_streamp strm; |
| 559 | gz_headerp head; | 559 | gz_headerp head; |
| 560 | { | 560 | { |
| @@ -565,7 +565,7 @@ int ZEXPORT deflateSetHeader (strm, head) | |||
| 565 | } | 565 | } |
| 566 | 566 | ||
| 567 | /* ========================================================================= */ | 567 | /* ========================================================================= */ |
| 568 | int ZEXPORT deflatePending (strm, pending, bits) | 568 | int ZEXPORT deflatePending(strm, pending, bits) |
| 569 | unsigned *pending; | 569 | unsigned *pending; |
| 570 | int *bits; | 570 | int *bits; |
| 571 | z_streamp strm; | 571 | z_streamp strm; |
| @@ -579,7 +579,7 @@ int ZEXPORT deflatePending (strm, pending, bits) | |||
| 579 | } | 579 | } |
| 580 | 580 | ||
| 581 | /* ========================================================================= */ | 581 | /* ========================================================================= */ |
| 582 | int ZEXPORT deflatePrime (strm, bits, value) | 582 | int ZEXPORT deflatePrime(strm, bits, value) |
| 583 | z_streamp strm; | 583 | z_streamp strm; |
| 584 | int bits; | 584 | int bits; |
| 585 | int value; | 585 | int value; |
| @@ -769,7 +769,7 @@ uLong ZEXPORT deflateBound(strm, sourceLen) | |||
| 769 | * IN assertion: the stream state is correct and there is enough room in | 769 | * IN assertion: the stream state is correct and there is enough room in |
| 770 | * pending_buf. | 770 | * pending_buf. |
| 771 | */ | 771 | */ |
| 772 | local void putShortMSB (s, b) | 772 | local void putShortMSB(s, b) |
| 773 | deflate_state *s; | 773 | deflate_state *s; |
| 774 | uInt b; | 774 | uInt b; |
| 775 | { | 775 | { |
| @@ -816,7 +816,7 @@ local void flush_pending(strm) | |||
| 816 | } while (0) | 816 | } while (0) |
| 817 | 817 | ||
| 818 | /* ========================================================================= */ | 818 | /* ========================================================================= */ |
| 819 | int ZEXPORT deflate (strm, flush) | 819 | int ZEXPORT deflate(strm, flush) |
| 820 | z_streamp strm; | 820 | z_streamp strm; |
| 821 | int flush; | 821 | int flush; |
| 822 | { | 822 | { |
| @@ -871,7 +871,7 @@ int ZEXPORT deflate (strm, flush) | |||
| 871 | s->status = BUSY_STATE; | 871 | s->status = BUSY_STATE; |
| 872 | if (s->status == INIT_STATE) { | 872 | if (s->status == INIT_STATE) { |
| 873 | /* zlib header */ | 873 | /* zlib header */ |
| 874 | uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8; | 874 | uInt header = (Z_DEFLATED + ((s->w_bits - 8) << 4)) << 8; |
| 875 | uInt level_flags; | 875 | uInt level_flags; |
| 876 | 876 | ||
| 877 | if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2) | 877 | if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2) |
| @@ -1131,7 +1131,7 @@ int ZEXPORT deflate (strm, flush) | |||
| 1131 | } | 1131 | } |
| 1132 | 1132 | ||
| 1133 | /* ========================================================================= */ | 1133 | /* ========================================================================= */ |
| 1134 | int ZEXPORT deflateEnd (strm) | 1134 | int ZEXPORT deflateEnd(strm) |
| 1135 | z_streamp strm; | 1135 | z_streamp strm; |
| 1136 | { | 1136 | { |
| 1137 | int status; | 1137 | int status; |
| @@ -1157,7 +1157,7 @@ int ZEXPORT deflateEnd (strm) | |||
| 1157 | * To simplify the source, this is not supported for 16-bit MSDOS (which | 1157 | * To simplify the source, this is not supported for 16-bit MSDOS (which |
| 1158 | * doesn't have enough memory anyway to duplicate compression states). | 1158 | * doesn't have enough memory anyway to duplicate compression states). |
| 1159 | */ | 1159 | */ |
| 1160 | int ZEXPORT deflateCopy (dest, source) | 1160 | int ZEXPORT deflateCopy(dest, source) |
| 1161 | z_streamp dest; | 1161 | z_streamp dest; |
| 1162 | z_streamp source; | 1162 | z_streamp source; |
| 1163 | { | 1163 | { |
| @@ -1246,7 +1246,7 @@ local unsigned read_buf(strm, buf, size) | |||
| 1246 | /* =========================================================================== | 1246 | /* =========================================================================== |
| 1247 | * Initialize the "longest match" routines for a new zlib stream | 1247 | * Initialize the "longest match" routines for a new zlib stream |
| 1248 | */ | 1248 | */ |
| 1249 | local void lm_init (s) | 1249 | local void lm_init(s) |
| 1250 | deflate_state *s; | 1250 | deflate_state *s; |
| 1251 | { | 1251 | { |
| 1252 | s->window_size = (ulg)2L*s->w_size; | 1252 | s->window_size = (ulg)2L*s->w_size; |
| @@ -1312,10 +1312,10 @@ local uInt longest_match(s, cur_match) | |||
| 1312 | */ | 1312 | */ |
| 1313 | register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1; | 1313 | register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1; |
| 1314 | register ush scan_start = *(ushf*)scan; | 1314 | register ush scan_start = *(ushf*)scan; |
| 1315 | register ush scan_end = *(ushf*)(scan+best_len-1); | 1315 | register ush scan_end = *(ushf*)(scan + best_len - 1); |
| 1316 | #else | 1316 | #else |
| 1317 | register Bytef *strend = s->window + s->strstart + MAX_MATCH; | 1317 | register Bytef *strend = s->window + s->strstart + MAX_MATCH; |
| 1318 | register Byte scan_end1 = scan[best_len-1]; | 1318 | register Byte scan_end1 = scan[best_len - 1]; |
| 1319 | register Byte scan_end = scan[best_len]; | 1319 | register Byte scan_end = scan[best_len]; |
| 1320 | #endif | 1320 | #endif |
| 1321 | 1321 | ||
| @@ -1333,7 +1333,8 @@ local uInt longest_match(s, cur_match) | |||
| 1333 | */ | 1333 | */ |
| 1334 | if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead; | 1334 | if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead; |
| 1335 | 1335 | ||
| 1336 | Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); | 1336 | Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, |
| 1337 | "need lookahead"); | ||
| 1337 | 1338 | ||
| 1338 | do { | 1339 | do { |
| 1339 | Assert(cur_match < s->strstart, "no future"); | 1340 | Assert(cur_match < s->strstart, "no future"); |
| @@ -1351,43 +1352,44 @@ local uInt longest_match(s, cur_match) | |||
| 1351 | /* This code assumes sizeof(unsigned short) == 2. Do not use | 1352 | /* This code assumes sizeof(unsigned short) == 2. Do not use |
| 1352 | * UNALIGNED_OK if your compiler uses a different size. | 1353 | * UNALIGNED_OK if your compiler uses a different size. |
| 1353 | */ | 1354 | */ |
| 1354 | if (*(ushf*)(match+best_len-1) != scan_end || | 1355 | if (*(ushf*)(match + best_len - 1) != scan_end || |
| 1355 | *(ushf*)match != scan_start) continue; | 1356 | *(ushf*)match != scan_start) continue; |
| 1356 | 1357 | ||
| 1357 | /* It is not necessary to compare scan[2] and match[2] since they are | 1358 | /* It is not necessary to compare scan[2] and match[2] since they are |
| 1358 | * always equal when the other bytes match, given that the hash keys | 1359 | * always equal when the other bytes match, given that the hash keys |
| 1359 | * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at | 1360 | * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at |
| 1360 | * strstart+3, +5, ... up to strstart+257. We check for insufficient | 1361 | * strstart + 3, + 5, up to strstart + 257. We check for insufficient |
| 1361 | * lookahead only every 4th comparison; the 128th check will be made | 1362 | * lookahead only every 4th comparison; the 128th check will be made |
| 1362 | * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is | 1363 | * at strstart + 257. If MAX_MATCH-2 is not a multiple of 8, it is |
| 1363 | * necessary to put more guard bytes at the end of the window, or | 1364 | * necessary to put more guard bytes at the end of the window, or |
| 1364 | * to check more often for insufficient lookahead. | 1365 | * to check more often for insufficient lookahead. |
| 1365 | */ | 1366 | */ |
| 1366 | Assert(scan[2] == match[2], "scan[2]?"); | 1367 | Assert(scan[2] == match[2], "scan[2]?"); |
| 1367 | scan++, match++; | 1368 | scan++, match++; |
| 1368 | do { | 1369 | do { |
| 1369 | } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) && | 1370 | } while (*(ushf*)(scan += 2) == *(ushf*)(match += 2) && |
| 1370 | *(ushf*)(scan+=2) == *(ushf*)(match+=2) && | 1371 | *(ushf*)(scan += 2) == *(ushf*)(match += 2) && |
| 1371 | *(ushf*)(scan+=2) == *(ushf*)(match+=2) && | 1372 | *(ushf*)(scan += 2) == *(ushf*)(match += 2) && |
| 1372 | *(ushf*)(scan+=2) == *(ushf*)(match+=2) && | 1373 | *(ushf*)(scan += 2) == *(ushf*)(match += 2) && |
| 1373 | scan < strend); | 1374 | scan < strend); |
| 1374 | /* The funny "do {}" generates better code on most compilers */ | 1375 | /* The funny "do {}" generates better code on most compilers */ |
| 1375 | 1376 | ||
| 1376 | /* Here, scan <= window+strstart+257 */ | 1377 | /* Here, scan <= window + strstart + 257 */ |
| 1377 | Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); | 1378 | Assert(scan <= s->window + (unsigned)(s->window_size - 1), |
| 1379 | "wild scan"); | ||
| 1378 | if (*scan == *match) scan++; | 1380 | if (*scan == *match) scan++; |
| 1379 | 1381 | ||
| 1380 | len = (MAX_MATCH - 1) - (int)(strend-scan); | 1382 | len = (MAX_MATCH - 1) - (int)(strend - scan); |
| 1381 | scan = strend - (MAX_MATCH-1); | 1383 | scan = strend - (MAX_MATCH-1); |
| 1382 | 1384 | ||
| 1383 | #else /* UNALIGNED_OK */ | 1385 | #else /* UNALIGNED_OK */ |
| 1384 | 1386 | ||
| 1385 | if (match[best_len] != scan_end || | 1387 | if (match[best_len] != scan_end || |
| 1386 | match[best_len-1] != scan_end1 || | 1388 | match[best_len - 1] != scan_end1 || |
| 1387 | *match != *scan || | 1389 | *match != *scan || |
| 1388 | *++match != scan[1]) continue; | 1390 | *++match != scan[1]) continue; |
| 1389 | 1391 | ||
| 1390 | /* The check at best_len-1 can be removed because it will be made | 1392 | /* The check at best_len - 1 can be removed because it will be made |
| 1391 | * again later. (This heuristic is not always a win.) | 1393 | * again later. (This heuristic is not always a win.) |
| 1392 | * It is not necessary to compare scan[2] and match[2] since they | 1394 | * It is not necessary to compare scan[2] and match[2] since they |
| 1393 | * are always equal when the other bytes match, given that | 1395 | * are always equal when the other bytes match, given that |
| @@ -1397,7 +1399,7 @@ local uInt longest_match(s, cur_match) | |||
| 1397 | Assert(*scan == *match, "match[2]?"); | 1399 | Assert(*scan == *match, "match[2]?"); |
| 1398 | 1400 | ||
| 1399 | /* We check for insufficient lookahead only every 8th comparison; | 1401 | /* We check for insufficient lookahead only every 8th comparison; |
| 1400 | * the 256th check will be made at strstart+258. | 1402 | * the 256th check will be made at strstart + 258. |
| 1401 | */ | 1403 | */ |
| 1402 | do { | 1404 | do { |
| 1403 | } while (*++scan == *++match && *++scan == *++match && | 1405 | } while (*++scan == *++match && *++scan == *++match && |
| @@ -1406,7 +1408,8 @@ local uInt longest_match(s, cur_match) | |||
| 1406 | *++scan == *++match && *++scan == *++match && | 1408 | *++scan == *++match && *++scan == *++match && |
| 1407 | scan < strend); | 1409 | scan < strend); |
| 1408 | 1410 | ||
| 1409 | Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); | 1411 | Assert(scan <= s->window + (unsigned)(s->window_size - 1), |
| 1412 | "wild scan"); | ||
| 1410 | 1413 | ||
| 1411 | len = MAX_MATCH - (int)(strend - scan); | 1414 | len = MAX_MATCH - (int)(strend - scan); |
| 1412 | scan = strend - MAX_MATCH; | 1415 | scan = strend - MAX_MATCH; |
| @@ -1418,9 +1421,9 @@ local uInt longest_match(s, cur_match) | |||
| 1418 | best_len = len; | 1421 | best_len = len; |
| 1419 | if (len >= nice_match) break; | 1422 | if (len >= nice_match) break; |
| 1420 | #ifdef UNALIGNED_OK | 1423 | #ifdef UNALIGNED_OK |
| 1421 | scan_end = *(ushf*)(scan+best_len-1); | 1424 | scan_end = *(ushf*)(scan + best_len - 1); |
| 1422 | #else | 1425 | #else |
| 1423 | scan_end1 = scan[best_len-1]; | 1426 | scan_end1 = scan[best_len - 1]; |
| 1424 | scan_end = scan[best_len]; | 1427 | scan_end = scan[best_len]; |
| 1425 | #endif | 1428 | #endif |
| 1426 | } | 1429 | } |
| @@ -1451,7 +1454,8 @@ local uInt longest_match(s, cur_match) | |||
| 1451 | */ | 1454 | */ |
| 1452 | Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); | 1455 | Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); |
| 1453 | 1456 | ||
| 1454 | Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); | 1457 | Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, |
| 1458 | "need lookahead"); | ||
| 1455 | 1459 | ||
| 1456 | Assert(cur_match < s->strstart, "no future"); | 1460 | Assert(cur_match < s->strstart, "no future"); |
| 1457 | 1461 | ||
| @@ -1461,7 +1465,7 @@ local uInt longest_match(s, cur_match) | |||
| 1461 | */ | 1465 | */ |
| 1462 | if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1; | 1466 | if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1; |
| 1463 | 1467 | ||
| 1464 | /* The check at best_len-1 can be removed because it will be made | 1468 | /* The check at best_len - 1 can be removed because it will be made |
| 1465 | * again later. (This heuristic is not always a win.) | 1469 | * again later. (This heuristic is not always a win.) |
| 1466 | * It is not necessary to compare scan[2] and match[2] since they | 1470 | * It is not necessary to compare scan[2] and match[2] since they |
| 1467 | * are always equal when the other bytes match, given that | 1471 | * are always equal when the other bytes match, given that |
| @@ -1471,7 +1475,7 @@ local uInt longest_match(s, cur_match) | |||
| 1471 | Assert(*scan == *match, "match[2]?"); | 1475 | Assert(*scan == *match, "match[2]?"); |
| 1472 | 1476 | ||
| 1473 | /* We check for insufficient lookahead only every 8th comparison; | 1477 | /* We check for insufficient lookahead only every 8th comparison; |
| 1474 | * the 256th check will be made at strstart+258. | 1478 | * the 256th check will be made at strstart + 258. |
| 1475 | */ | 1479 | */ |
| 1476 | do { | 1480 | do { |
| 1477 | } while (*++scan == *++match && *++scan == *++match && | 1481 | } while (*++scan == *++match && *++scan == *++match && |
| @@ -1480,7 +1484,7 @@ local uInt longest_match(s, cur_match) | |||
| 1480 | *++scan == *++match && *++scan == *++match && | 1484 | *++scan == *++match && *++scan == *++match && |
| 1481 | scan < strend); | 1485 | scan < strend); |
| 1482 | 1486 | ||
| 1483 | Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); | 1487 | Assert(scan <= s->window + (unsigned)(s->window_size - 1), "wild scan"); |
| 1484 | 1488 | ||
| 1485 | len = MAX_MATCH - (int)(strend - scan); | 1489 | len = MAX_MATCH - (int)(strend - scan); |
| 1486 | 1490 | ||
| @@ -1516,7 +1520,7 @@ local void check_match(s, start, match, length) | |||
| 1516 | z_error("invalid match"); | 1520 | z_error("invalid match"); |
| 1517 | } | 1521 | } |
| 1518 | if (z_verbose > 1) { | 1522 | if (z_verbose > 1) { |
| 1519 | fprintf(stderr,"\\[%d,%d]", start-match, length); | 1523 | fprintf(stderr,"\\[%d,%d]", start - match, length); |
| 1520 | do { putc(s->window[start++], stderr); } while (--length != 0); | 1524 | do { putc(s->window[start++], stderr); } while (--length != 0); |
| 1521 | } | 1525 | } |
| 1522 | } | 1526 | } |
| @@ -1562,9 +1566,9 @@ local void fill_window(s) | |||
| 1562 | /* If the window is almost full and there is insufficient lookahead, | 1566 | /* If the window is almost full and there is insufficient lookahead, |
| 1563 | * move the upper half to the lower one to make room in the upper half. | 1567 | * move the upper half to the lower one to make room in the upper half. |
| 1564 | */ | 1568 | */ |
| 1565 | if (s->strstart >= wsize+MAX_DIST(s)) { | 1569 | if (s->strstart >= wsize + MAX_DIST(s)) { |
| 1566 | 1570 | ||
| 1567 | zmemcpy(s->window, s->window+wsize, (unsigned)wsize - more); | 1571 | zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more); |
| 1568 | s->match_start -= wsize; | 1572 | s->match_start -= wsize; |
| 1569 | s->strstart -= wsize; /* we now have strstart >= MAX_DIST */ | 1573 | s->strstart -= wsize; /* we now have strstart >= MAX_DIST */ |
| 1570 | s->block_start -= (long) wsize; | 1574 | s->block_start -= (long) wsize; |
| @@ -1905,7 +1909,7 @@ local block_state deflate_fast(s, flush) | |||
| 1905 | if (s->lookahead == 0) break; /* flush the current block */ | 1909 | if (s->lookahead == 0) break; /* flush the current block */ |
| 1906 | } | 1910 | } |
| 1907 | 1911 | ||
| 1908 | /* Insert the string window[strstart .. strstart+2] in the | 1912 | /* Insert the string window[strstart .. strstart + 2] in the |
| 1909 | * dictionary, and set hash_head to the head of the hash chain: | 1913 | * dictionary, and set hash_head to the head of the hash chain: |
| 1910 | */ | 1914 | */ |
| 1911 | hash_head = NIL; | 1915 | hash_head = NIL; |
| @@ -1953,7 +1957,7 @@ local block_state deflate_fast(s, flush) | |||
| 1953 | s->strstart += s->match_length; | 1957 | s->strstart += s->match_length; |
| 1954 | s->match_length = 0; | 1958 | s->match_length = 0; |
| 1955 | s->ins_h = s->window[s->strstart]; | 1959 | s->ins_h = s->window[s->strstart]; |
| 1956 | UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]); | 1960 | UPDATE_HASH(s, s->ins_h, s->window[s->strstart + 1]); |
| 1957 | #if MIN_MATCH != 3 | 1961 | #if MIN_MATCH != 3 |
| 1958 | Call UPDATE_HASH() MIN_MATCH-3 more times | 1962 | Call UPDATE_HASH() MIN_MATCH-3 more times |
| 1959 | #endif | 1963 | #endif |
| @@ -1964,7 +1968,7 @@ local block_state deflate_fast(s, flush) | |||
| 1964 | } else { | 1968 | } else { |
| 1965 | /* No match, output a literal byte */ | 1969 | /* No match, output a literal byte */ |
| 1966 | Tracevv((stderr,"%c", s->window[s->strstart])); | 1970 | Tracevv((stderr,"%c", s->window[s->strstart])); |
| 1967 | _tr_tally_lit (s, s->window[s->strstart], bflush); | 1971 | _tr_tally_lit(s, s->window[s->strstart], bflush); |
| 1968 | s->lookahead--; | 1972 | s->lookahead--; |
| 1969 | s->strstart++; | 1973 | s->strstart++; |
| 1970 | } | 1974 | } |
| @@ -2008,7 +2012,7 @@ local block_state deflate_slow(s, flush) | |||
| 2008 | if (s->lookahead == 0) break; /* flush the current block */ | 2012 | if (s->lookahead == 0) break; /* flush the current block */ |
| 2009 | } | 2013 | } |
| 2010 | 2014 | ||
| 2011 | /* Insert the string window[strstart .. strstart+2] in the | 2015 | /* Insert the string window[strstart .. strstart + 2] in the |
| 2012 | * dictionary, and set hash_head to the head of the hash chain: | 2016 | * dictionary, and set hash_head to the head of the hash chain: |
| 2013 | */ | 2017 | */ |
| 2014 | hash_head = NIL; | 2018 | hash_head = NIL; |
| @@ -2050,17 +2054,17 @@ local block_state deflate_slow(s, flush) | |||
| 2050 | uInt max_insert = s->strstart + s->lookahead - MIN_MATCH; | 2054 | uInt max_insert = s->strstart + s->lookahead - MIN_MATCH; |
| 2051 | /* Do not insert strings in hash table beyond this. */ | 2055 | /* Do not insert strings in hash table beyond this. */ |
| 2052 | 2056 | ||
| 2053 | check_match(s, s->strstart-1, s->prev_match, s->prev_length); | 2057 | check_match(s, s->strstart - 1, s->prev_match, s->prev_length); |
| 2054 | 2058 | ||
| 2055 | _tr_tally_dist(s, s->strstart -1 - s->prev_match, | 2059 | _tr_tally_dist(s, s->strstart - 1 - s->prev_match, |
| 2056 | s->prev_length - MIN_MATCH, bflush); | 2060 | s->prev_length - MIN_MATCH, bflush); |
| 2057 | 2061 | ||
| 2058 | /* Insert in hash table all strings up to the end of the match. | 2062 | /* Insert in hash table all strings up to the end of the match. |
| 2059 | * strstart-1 and strstart are already inserted. If there is not | 2063 | * strstart - 1 and strstart are already inserted. If there is not |
| 2060 | * enough lookahead, the last two strings are not inserted in | 2064 | * enough lookahead, the last two strings are not inserted in |
| 2061 | * the hash table. | 2065 | * the hash table. |
| 2062 | */ | 2066 | */ |
| 2063 | s->lookahead -= s->prev_length-1; | 2067 | s->lookahead -= s->prev_length - 1; |
| 2064 | s->prev_length -= 2; | 2068 | s->prev_length -= 2; |
| 2065 | do { | 2069 | do { |
| 2066 | if (++s->strstart <= max_insert) { | 2070 | if (++s->strstart <= max_insert) { |
| @@ -2078,8 +2082,8 @@ local block_state deflate_slow(s, flush) | |||
| 2078 | * single literal. If there was a match but the current match | 2082 | * single literal. If there was a match but the current match |
| 2079 | * is longer, truncate the previous match to a single literal. | 2083 | * is longer, truncate the previous match to a single literal. |
| 2080 | */ | 2084 | */ |
| 2081 | Tracevv((stderr,"%c", s->window[s->strstart-1])); | 2085 | Tracevv((stderr,"%c", s->window[s->strstart - 1])); |
| 2082 | _tr_tally_lit(s, s->window[s->strstart-1], bflush); | 2086 | _tr_tally_lit(s, s->window[s->strstart - 1], bflush); |
| 2083 | if (bflush) { | 2087 | if (bflush) { |
| 2084 | FLUSH_BLOCK_ONLY(s, 0); | 2088 | FLUSH_BLOCK_ONLY(s, 0); |
| 2085 | } | 2089 | } |
| @@ -2097,8 +2101,8 @@ local block_state deflate_slow(s, flush) | |||
| 2097 | } | 2101 | } |
| 2098 | Assert (flush != Z_NO_FLUSH, "no flush?"); | 2102 | Assert (flush != Z_NO_FLUSH, "no flush?"); |
| 2099 | if (s->match_available) { | 2103 | if (s->match_available) { |
| 2100 | Tracevv((stderr,"%c", s->window[s->strstart-1])); | 2104 | Tracevv((stderr,"%c", s->window[s->strstart - 1])); |
| 2101 | _tr_tally_lit(s, s->window[s->strstart-1], bflush); | 2105 | _tr_tally_lit(s, s->window[s->strstart - 1], bflush); |
| 2102 | s->match_available = 0; | 2106 | s->match_available = 0; |
| 2103 | } | 2107 | } |
| 2104 | s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1; | 2108 | s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1; |
| @@ -2155,7 +2159,8 @@ local block_state deflate_rle(s, flush) | |||
| 2155 | if (s->match_length > s->lookahead) | 2159 | if (s->match_length > s->lookahead) |
| 2156 | s->match_length = s->lookahead; | 2160 | s->match_length = s->lookahead; |
| 2157 | } | 2161 | } |
| 2158 | Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); | 2162 | Assert(scan <= s->window + (uInt)(s->window_size - 1), |
| 2163 | "wild scan"); | ||
| 2159 | } | 2164 | } |
| 2160 | 2165 | ||
| 2161 | /* Emit match if have run of MIN_MATCH or longer, else emit literal */ | 2166 | /* Emit match if have run of MIN_MATCH or longer, else emit literal */ |
| @@ -2170,7 +2175,7 @@ local block_state deflate_rle(s, flush) | |||
| 2170 | } else { | 2175 | } else { |
| 2171 | /* No match, output a literal byte */ | 2176 | /* No match, output a literal byte */ |
| 2172 | Tracevv((stderr,"%c", s->window[s->strstart])); | 2177 | Tracevv((stderr,"%c", s->window[s->strstart])); |
| 2173 | _tr_tally_lit (s, s->window[s->strstart], bflush); | 2178 | _tr_tally_lit(s, s->window[s->strstart], bflush); |
| 2174 | s->lookahead--; | 2179 | s->lookahead--; |
| 2175 | s->strstart++; | 2180 | s->strstart++; |
| 2176 | } | 2181 | } |
| @@ -2210,7 +2215,7 @@ local block_state deflate_huff(s, flush) | |||
| 2210 | /* Output a literal byte */ | 2215 | /* Output a literal byte */ |
| 2211 | s->match_length = 0; | 2216 | s->match_length = 0; |
| 2212 | Tracevv((stderr,"%c", s->window[s->strstart])); | 2217 | Tracevv((stderr,"%c", s->window[s->strstart])); |
| 2213 | _tr_tally_lit (s, s->window[s->strstart], bflush); | 2218 | _tr_tally_lit(s, s->window[s->strstart], bflush); |
| 2214 | s->lookahead--; | 2219 | s->lookahead--; |
| 2215 | s->strstart++; | 2220 | s->strstart++; |
| 2216 | if (bflush) FLUSH_BLOCK(s, 0); | 2221 | if (bflush) FLUSH_BLOCK(s, 0); |
| diff --git a/gzlib.c b/gzlib.c | |||
| @@ -30,7 +30,7 @@ local gzFile gz_open OF((const void *, int, const char *)); | |||
| 30 | 30 | ||
| 31 | The gz_strwinerror function does not change the current setting of | 31 | The gz_strwinerror function does not change the current setting of |
| 32 | GetLastError. */ | 32 | GetLastError. */ |
| 33 | char ZLIB_INTERNAL *gz_strwinerror (error) | 33 | char ZLIB_INTERNAL *gz_strwinerror(error) |
| 34 | DWORD error; | 34 | DWORD error; |
| 35 | { | 35 | { |
| 36 | static char buf[1024]; | 36 | static char buf[1024]; |
| diff --git a/gzwrite.c b/gzwrite.c | |||
| @@ -474,7 +474,7 @@ int ZEXPORTVA gzprintf(gzFile file, const char *format, ...) | |||
| 474 | #else /* !STDC && !Z_HAVE_STDARG_H */ | 474 | #else /* !STDC && !Z_HAVE_STDARG_H */ |
| 475 | 475 | ||
| 476 | /* -- see zlib.h -- */ | 476 | /* -- see zlib.h -- */ |
| 477 | int ZEXPORTVA gzprintf (file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, | 477 | int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, |
| 478 | a11, a12, a13, a14, a15, a16, a17, a18, a19, a20) | 478 | a11, a12, a13, a14, a15, a16, a17, a18, a19, a20) |
| 479 | gzFile file; | 479 | gzFile file; |
| 480 | const char *format; | 480 | const char *format; |
| diff --git a/trees.c b/trees.c | |||
| @@ -193,7 +193,7 @@ local void send_bits(s, value, length) | |||
| 193 | s->bits_sent += (ulg)length; | 193 | s->bits_sent += (ulg)length; |
| 194 | 194 | ||
| 195 | /* If not enough room in bi_buf, use (valid) bits from bi_buf and | 195 | /* If not enough room in bi_buf, use (valid) bits from bi_buf and |
| 196 | * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid)) | 196 | * (16 - bi_valid) bits from value, leaving (width - (16 - bi_valid)) |
| 197 | * unused bits in value. | 197 | * unused bits in value. |
| 198 | */ | 198 | */ |
| 199 | if (s->bi_valid > (int)Buf_size - length) { | 199 | if (s->bi_valid > (int)Buf_size - length) { |
| @@ -256,7 +256,7 @@ local void tr_static_init() | |||
| 256 | length = 0; | 256 | length = 0; |
| 257 | for (code = 0; code < LENGTH_CODES-1; code++) { | 257 | for (code = 0; code < LENGTH_CODES-1; code++) { |
| 258 | base_length[code] = length; | 258 | base_length[code] = length; |
| 259 | for (n = 0; n < (1<<extra_lbits[code]); n++) { | 259 | for (n = 0; n < (1 << extra_lbits[code]); n++) { |
| 260 | _length_code[length++] = (uch)code; | 260 | _length_code[length++] = (uch)code; |
| 261 | } | 261 | } |
| 262 | } | 262 | } |
| @@ -265,13 +265,13 @@ local void tr_static_init() | |||
| 265 | * in two different ways: code 284 + 5 bits or code 285, so we | 265 | * in two different ways: code 284 + 5 bits or code 285, so we |
| 266 | * overwrite length_code[255] to use the best encoding: | 266 | * overwrite length_code[255] to use the best encoding: |
| 267 | */ | 267 | */ |
| 268 | _length_code[length-1] = (uch)code; | 268 | _length_code[length - 1] = (uch)code; |
| 269 | 269 | ||
| 270 | /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ | 270 | /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ |
| 271 | dist = 0; | 271 | dist = 0; |
| 272 | for (code = 0 ; code < 16; code++) { | 272 | for (code = 0 ; code < 16; code++) { |
| 273 | base_dist[code] = dist; | 273 | base_dist[code] = dist; |
| 274 | for (n = 0; n < (1<<extra_dbits[code]); n++) { | 274 | for (n = 0; n < (1 << extra_dbits[code]); n++) { |
| 275 | _dist_code[dist++] = (uch)code; | 275 | _dist_code[dist++] = (uch)code; |
| 276 | } | 276 | } |
| 277 | } | 277 | } |
| @@ -279,11 +279,11 @@ local void tr_static_init() | |||
| 279 | dist >>= 7; /* from now on, all distances are divided by 128 */ | 279 | dist >>= 7; /* from now on, all distances are divided by 128 */ |
| 280 | for ( ; code < D_CODES; code++) { | 280 | for ( ; code < D_CODES; code++) { |
| 281 | base_dist[code] = dist << 7; | 281 | base_dist[code] = dist << 7; |
| 282 | for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { | 282 | for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { |
| 283 | _dist_code[256 + dist++] = (uch)code; | 283 | _dist_code[256 + dist++] = (uch)code; |
| 284 | } | 284 | } |
| 285 | } | 285 | } |
| 286 | Assert (dist == 256, "tr_static_init: 256+dist != 512"); | 286 | Assert (dist == 256, "tr_static_init: 256 + dist != 512"); |
| 287 | 287 | ||
| 288 | /* Construct the codes of the static literal tree */ | 288 | /* Construct the codes of the static literal tree */ |
| 289 | for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; | 289 | for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; |
| @@ -321,7 +321,7 @@ local void tr_static_init() | |||
| 321 | 321 | ||
| 322 | # define SEPARATOR(i, last, width) \ | 322 | # define SEPARATOR(i, last, width) \ |
| 323 | ((i) == (last)? "\n};\n\n" : \ | 323 | ((i) == (last)? "\n};\n\n" : \ |
| 324 | ((i) % (width) == (width)-1 ? ",\n" : ", ")) | 324 | ((i) % (width) == (width) - 1 ? ",\n" : ", ")) |
| 325 | 325 | ||
| 326 | void gen_trees_header() | 326 | void gen_trees_header() |
| 327 | { | 327 | { |
| @@ -458,7 +458,7 @@ local void pqdownheap(s, tree, k) | |||
| 458 | while (j <= s->heap_len) { | 458 | while (j <= s->heap_len) { |
| 459 | /* Set j to the smallest of the two sons: */ | 459 | /* Set j to the smallest of the two sons: */ |
| 460 | if (j < s->heap_len && | 460 | if (j < s->heap_len && |
| 461 | smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { | 461 | smaller(tree, s->heap[j + 1], s->heap[j], s->depth)) { |
| 462 | j++; | 462 | j++; |
| 463 | } | 463 | } |
| 464 | /* Exit if v is smaller than both sons */ | 464 | /* Exit if v is smaller than both sons */ |
| @@ -507,7 +507,7 @@ local void gen_bitlen(s, desc) | |||
| 507 | */ | 507 | */ |
| 508 | tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ | 508 | tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ |
| 509 | 509 | ||
| 510 | for (h = s->heap_max+1; h < HEAP_SIZE; h++) { | 510 | for (h = s->heap_max + 1; h < HEAP_SIZE; h++) { |
| 511 | n = s->heap[h]; | 511 | n = s->heap[h]; |
| 512 | bits = tree[tree[n].Dad].Len + 1; | 512 | bits = tree[tree[n].Dad].Len + 1; |
| 513 | if (bits > max_length) bits = max_length, overflow++; | 513 | if (bits > max_length) bits = max_length, overflow++; |
| @@ -518,7 +518,7 @@ local void gen_bitlen(s, desc) | |||
| 518 | 518 | ||
| 519 | s->bl_count[bits]++; | 519 | s->bl_count[bits]++; |
| 520 | xbits = 0; | 520 | xbits = 0; |
| 521 | if (n >= base) xbits = extra[n-base]; | 521 | if (n >= base) xbits = extra[n - base]; |
| 522 | f = tree[n].Freq; | 522 | f = tree[n].Freq; |
| 523 | s->opt_len += (ulg)f * (unsigned)(bits + xbits); | 523 | s->opt_len += (ulg)f * (unsigned)(bits + xbits); |
| 524 | if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits); | 524 | if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits); |
| @@ -530,10 +530,10 @@ local void gen_bitlen(s, desc) | |||
| 530 | 530 | ||
| 531 | /* Find the first bit length which could increase: */ | 531 | /* Find the first bit length which could increase: */ |
| 532 | do { | 532 | do { |
| 533 | bits = max_length-1; | 533 | bits = max_length - 1; |
| 534 | while (s->bl_count[bits] == 0) bits--; | 534 | while (s->bl_count[bits] == 0) bits--; |
| 535 | s->bl_count[bits]--; /* move one leaf down the tree */ | 535 | s->bl_count[bits]--; /* move one leaf down the tree */ |
| 536 | s->bl_count[bits+1] += 2; /* move one overflow item as its brother */ | 536 | s->bl_count[bits + 1] += 2; /* move one overflow item as its brother */ |
| 537 | s->bl_count[max_length]--; | 537 | s->bl_count[max_length]--; |
| 538 | /* The brother of the overflow item also moves one step up, | 538 | /* The brother of the overflow item also moves one step up, |
| 539 | * but this does not affect bl_count[max_length] | 539 | * but this does not affect bl_count[max_length] |
| @@ -569,7 +569,7 @@ local void gen_bitlen(s, desc) | |||
| 569 | * OUT assertion: the field code is set for all tree elements of non | 569 | * OUT assertion: the field code is set for all tree elements of non |
| 570 | * zero code length. | 570 | * zero code length. |
| 571 | */ | 571 | */ |
| 572 | local void gen_codes (tree, max_code, bl_count) | 572 | local void gen_codes(tree, max_code, bl_count) |
| 573 | ct_data *tree; /* the tree to decorate */ | 573 | ct_data *tree; /* the tree to decorate */ |
| 574 | int max_code; /* largest code with non zero frequency */ | 574 | int max_code; /* largest code with non zero frequency */ |
| 575 | ushf *bl_count; /* number of codes at each bit length */ | 575 | ushf *bl_count; /* number of codes at each bit length */ |
| @@ -583,13 +583,13 @@ local void gen_codes (tree, max_code, bl_count) | |||
| 583 | * without bit reversal. | 583 | * without bit reversal. |
| 584 | */ | 584 | */ |
| 585 | for (bits = 1; bits <= MAX_BITS; bits++) { | 585 | for (bits = 1; bits <= MAX_BITS; bits++) { |
| 586 | code = (code + bl_count[bits-1]) << 1; | 586 | code = (code + bl_count[bits - 1]) << 1; |
| 587 | next_code[bits] = (ush)code; | 587 | next_code[bits] = (ush)code; |
| 588 | } | 588 | } |
| 589 | /* Check that the bit counts in bl_count are consistent. The last code | 589 | /* Check that the bit counts in bl_count are consistent. The last code |
| 590 | * must be all ones. | 590 | * must be all ones. |
| 591 | */ | 591 | */ |
| 592 | Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, | 592 | Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1, |
| 593 | "inconsistent bit counts"); | 593 | "inconsistent bit counts"); |
| 594 | Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); | 594 | Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); |
| 595 | 595 | ||
| @@ -600,7 +600,7 @@ local void gen_codes (tree, max_code, bl_count) | |||
| 600 | tree[n].Code = (ush)bi_reverse(next_code[len]++, len); | 600 | tree[n].Code = (ush)bi_reverse(next_code[len]++, len); |
| 601 | 601 | ||
| 602 | Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", | 602 | Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", |
| 603 | n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1)); | 603 | n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1)); |
| 604 | } | 604 | } |
| 605 | } | 605 | } |
| 606 | 606 | ||
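For reference, the standard worked example of this next_code computation (RFC 1951, section 3.2.2): code lengths (3, 3, 3, 3, 3, 2, 4, 4) give bl_count[2] = 1, bl_count[3] = 5, bl_count[4] = 2, hence next_code[2] = 0, next_code[3] = 2, next_code[4] = 14, and the codes assigned in symbol order are 010, 011, 100, 101, 110, 00, 1110, 1111 — the last 4-bit code is all ones, which is exactly the consistency the Assert above checks.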
| @@ -624,7 +624,7 @@ local void build_tree(s, desc) | |||
| 624 | int node; /* new node being created */ | 624 | int node; /* new node being created */ |
| 625 | 625 | ||
| 626 | /* Construct the initial heap, with least frequent element in | 626 | /* Construct the initial heap, with least frequent element in |
| 627 | * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. | 627 | * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n + 1]. |
| 628 | * heap[0] is not used. | 628 | * heap[0] is not used. |
| 629 | */ | 629 | */ |
| 630 | s->heap_len = 0, s->heap_max = HEAP_SIZE; | 630 | s->heap_len = 0, s->heap_max = HEAP_SIZE; |
| @@ -652,7 +652,7 @@ local void build_tree(s, desc) | |||
| 652 | } | 652 | } |
| 653 | desc->max_code = max_code; | 653 | desc->max_code = max_code; |
| 654 | 654 | ||
| 655 | /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, | 655 | /* The elements heap[heap_len/2 + 1 .. heap_len] are leaves of the tree, |
| 656 | * establish sub-heaps of increasing lengths: | 656 | * establish sub-heaps of increasing lengths: |
| 657 | */ | 657 | */ |
| 658 | for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); | 658 | for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); |
| @@ -700,7 +700,7 @@ local void build_tree(s, desc) | |||
| 700 | * Scan a literal or distance tree to determine the frequencies of the codes | 700 | * Scan a literal or distance tree to determine the frequencies of the codes |
| 701 | * in the bit length tree. | 701 | * in the bit length tree. |
| 702 | */ | 702 | */ |
| 703 | local void scan_tree (s, tree, max_code) | 703 | local void scan_tree(s, tree, max_code) |
| 704 | deflate_state *s; | 704 | deflate_state *s; |
| 705 | ct_data *tree; /* the tree to be scanned */ | 705 | ct_data *tree; /* the tree to be scanned */ |
| 706 | int max_code; /* and its largest code of non zero frequency */ | 706 | int max_code; /* and its largest code of non zero frequency */ |
| @@ -714,10 +714,10 @@ local void scan_tree (s, tree, max_code) | |||
| 714 | int min_count = 4; /* min repeat count */ | 714 | int min_count = 4; /* min repeat count */ |
| 715 | 715 | ||
| 716 | if (nextlen == 0) max_count = 138, min_count = 3; | 716 | if (nextlen == 0) max_count = 138, min_count = 3; |
| 717 | tree[max_code+1].Len = (ush)0xffff; /* guard */ | 717 | tree[max_code + 1].Len = (ush)0xffff; /* guard */ |
| 718 | 718 | ||
| 719 | for (n = 0; n <= max_code; n++) { | 719 | for (n = 0; n <= max_code; n++) { |
| 720 | curlen = nextlen; nextlen = tree[n+1].Len; | 720 | curlen = nextlen; nextlen = tree[n + 1].Len; |
| 721 | if (++count < max_count && curlen == nextlen) { | 721 | if (++count < max_count && curlen == nextlen) { |
| 722 | continue; | 722 | continue; |
| 723 | } else if (count < min_count) { | 723 | } else if (count < min_count) { |
| @@ -745,7 +745,7 @@ local void scan_tree (s, tree, max_code) | |||
| 745 | * Send a literal or distance tree in compressed form, using the codes in | 745 | * Send a literal or distance tree in compressed form, using the codes in |
| 746 | * bl_tree. | 746 | * bl_tree. |
| 747 | */ | 747 | */ |
| 748 | local void send_tree (s, tree, max_code) | 748 | local void send_tree(s, tree, max_code) |
| 749 | deflate_state *s; | 749 | deflate_state *s; |
| 750 | ct_data *tree; /* the tree to be scanned */ | 750 | ct_data *tree; /* the tree to be scanned */ |
| 751 | int max_code; /* and its largest code of non zero frequency */ | 751 | int max_code; /* and its largest code of non zero frequency */ |
| @@ -758,11 +758,11 @@ local void send_tree (s, tree, max_code) | |||
| 758 | int max_count = 7; /* max repeat count */ | 758 | int max_count = 7; /* max repeat count */ |
| 759 | int min_count = 4; /* min repeat count */ | 759 | int min_count = 4; /* min repeat count */ |
| 760 | 760 | ||
| 761 | /* tree[max_code+1].Len = -1; */ /* guard already set */ | 761 | /* tree[max_code + 1].Len = -1; */ /* guard already set */ |
| 762 | if (nextlen == 0) max_count = 138, min_count = 3; | 762 | if (nextlen == 0) max_count = 138, min_count = 3; |
| 763 | 763 | ||
| 764 | for (n = 0; n <= max_code; n++) { | 764 | for (n = 0; n <= max_code; n++) { |
| 765 | curlen = nextlen; nextlen = tree[n+1].Len; | 765 | curlen = nextlen; nextlen = tree[n + 1].Len; |
| 766 | if (++count < max_count && curlen == nextlen) { | 766 | if (++count < max_count && curlen == nextlen) { |
| 767 | continue; | 767 | continue; |
| 768 | } else if (count < min_count) { | 768 | } else if (count < min_count) { |
| @@ -773,13 +773,13 @@ local void send_tree (s, tree, max_code) | |||
| 773 | send_code(s, curlen, s->bl_tree); count--; | 773 | send_code(s, curlen, s->bl_tree); count--; |
| 774 | } | 774 | } |
| 775 | Assert(count >= 3 && count <= 6, " 3_6?"); | 775 | Assert(count >= 3 && count <= 6, " 3_6?"); |
| 776 | send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2); | 776 | send_code(s, REP_3_6, s->bl_tree); send_bits(s, count - 3, 2); |
| 777 | 777 | ||
| 778 | } else if (count <= 10) { | 778 | } else if (count <= 10) { |
| 779 | send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3); | 779 | send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count - 3, 3); |
| 780 | 780 | ||
| 781 | } else { | 781 | } else { |
| 782 | send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7); | 782 | send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count - 11, 7); |
| 783 | } | 783 | } |
| 784 | count = 0; prevlen = curlen; | 784 | count = 0; prevlen = curlen; |
| 785 | if (nextlen == 0) { | 785 | if (nextlen == 0) { |
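Those three codes are the run-length escapes of the DEFLATE code-length alphabet: REP_3_6 (code 16) repeats the previous length 3-6 times, hence count - 3 in 2 extra bits; REPZ_3_10 (code 17) repeats a zero length 3-10 times (count - 3 in 3 bits); and REPZ_11_138 (code 18) repeats a zero length 11-138 times (count - 11 in 7 bits). For example, a run of 9 zero lengths is sent as REPZ_3_10 followed by the 3-bit value 9 - 3 = 6.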
| @@ -807,8 +807,8 @@ local int build_bl_tree(s) | |||
| 807 | 807 | ||
| 808 | /* Build the bit length tree: */ | 808 | /* Build the bit length tree: */ |
| 809 | build_tree(s, (tree_desc *)(&(s->bl_desc))); | 809 | build_tree(s, (tree_desc *)(&(s->bl_desc))); |
| 810 | /* opt_len now includes the length of the tree representations, except | 810 | /* opt_len now includes the length of the tree representations, except the |
| 811 | * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. | 811 | * lengths of the bit lengths codes and the 5 + 5 + 4 bits for the counts. |
| 812 | */ | 812 | */ |
| 813 | 813 | ||
| 814 | /* Determine the number of bit length codes to send. The pkzip format | 814 | /* Determine the number of bit length codes to send. The pkzip format |
| @@ -819,7 +819,7 @@ local int build_bl_tree(s) | |||
| 819 | if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; | 819 | if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; |
| 820 | } | 820 | } |
| 821 | /* Update opt_len to include the bit length tree and counts */ | 821 | /* Update opt_len to include the bit length tree and counts */ |
| 822 | s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4; | 822 | s->opt_len += 3*((ulg)max_blindex + 1) + 5 + 5 + 4; |
| 823 | Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", | 823 | Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", |
| 824 | s->opt_len, s->static_len)); | 824 | s->opt_len, s->static_len)); |
| 825 | 825 | ||
| @@ -841,19 +841,19 @@ local void send_all_trees(s, lcodes, dcodes, blcodes) | |||
| 841 | Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, | 841 | Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, |
| 842 | "too many codes"); | 842 | "too many codes"); |
| 843 | Tracev((stderr, "\nbl counts: ")); | 843 | Tracev((stderr, "\nbl counts: ")); |
| 844 | send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ | 844 | send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ |
| 845 | send_bits(s, dcodes-1, 5); | 845 | send_bits(s, dcodes - 1, 5); |
| 846 | send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ | 846 | send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ |
| 847 | for (rank = 0; rank < blcodes; rank++) { | 847 | for (rank = 0; rank < blcodes; rank++) { |
| 848 | Tracev((stderr, "\nbl code %2d ", bl_order[rank])); | 848 | Tracev((stderr, "\nbl code %2d ", bl_order[rank])); |
| 849 | send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); | 849 | send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); |
| 850 | } | 850 | } |
| 851 | Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); | 851 | Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); |
| 852 | 852 | ||
| 853 | send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ | 853 | send_tree(s, (ct_data *)s->dyn_ltree, lcodes - 1); /* literal tree */ |
| 854 | Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); | 854 | Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); |
| 855 | 855 | ||
| 856 | send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ | 856 | send_tree(s, (ct_data *)s->dyn_dtree, dcodes - 1); /* distance tree */ |
| 857 | Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); | 857 | Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); |
| 858 | } | 858 | } |
| 859 | 859 | ||
| @@ -866,7 +866,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last) | |||
| 866 | ulg stored_len; /* length of input block */ | 866 | ulg stored_len; /* length of input block */ |
| 867 | int last; /* one if this is the last block for a file */ | 867 | int last; /* one if this is the last block for a file */ |
| 868 | { | 868 | { |
| 869 | send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */ | 869 | send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */ |
| 870 | bi_windup(s); /* align on byte boundary */ | 870 | bi_windup(s); /* align on byte boundary */ |
| 871 | put_short(s, (ush)stored_len); | 871 | put_short(s, (ush)stored_len); |
| 872 | put_short(s, (ush)~stored_len); | 872 | put_short(s, (ush)~stored_len); |
| @@ -877,7 +877,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last) | |||
| 877 | s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; | 877 | s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; |
| 878 | s->compressed_len += (stored_len + 4) << 3; | 878 | s->compressed_len += (stored_len + 4) << 3; |
| 879 | s->bits_sent += 2*16; | 879 | s->bits_sent += 2*16; |
| 880 | s->bits_sent += stored_len<<3; | 880 | s->bits_sent += stored_len << 3; |
| 881 | #endif | 881 | #endif |
| 882 | } | 882 | } |
| 883 | 883 | ||
| @@ -943,8 +943,8 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last) | |||
| 943 | max_blindex = build_bl_tree(s); | 943 | max_blindex = build_bl_tree(s); |
| 944 | 944 | ||
| 945 | /* Determine the best encoding. Compute the block lengths in bytes. */ | 945 | /* Determine the best encoding. Compute the block lengths in bytes. */ |
| 946 | opt_lenb = (s->opt_len+3+7)>>3; | 946 | opt_lenb = (s->opt_len + 3 + 7) >> 3; |
| 947 | static_lenb = (s->static_len+3+7)>>3; | 947 | static_lenb = (s->static_len + 3 + 7) >> 3; |
| 948 | 948 | ||
| 949 | Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", | 949 | Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", |
| 950 | opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, | 950 | opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, |
| @@ -963,7 +963,7 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last) | |||
| 963 | #ifdef FORCE_STORED | 963 | #ifdef FORCE_STORED |
| 964 | if (buf != (char*)0) { /* force stored block */ | 964 | if (buf != (char*)0) { /* force stored block */ |
| 965 | #else | 965 | #else |
| 966 | if (stored_len+4 <= opt_lenb && buf != (char*)0) { | 966 | if (stored_len + 4 <= opt_lenb && buf != (char*)0) { |
| 967 | /* 4: two words for the lengths */ | 967 | /* 4: two words for the lengths */ |
| 968 | #endif | 968 | #endif |
| 969 | /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. | 969 | /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. |
| @@ -975,16 +975,16 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last) | |||
| 975 | _tr_stored_block(s, buf, stored_len, last); | 975 | _tr_stored_block(s, buf, stored_len, last); |
| 976 | 976 | ||
| 977 | } else if (static_lenb == opt_lenb) { | 977 | } else if (static_lenb == opt_lenb) { |
| 978 | send_bits(s, (STATIC_TREES<<1)+last, 3); | 978 | send_bits(s, (STATIC_TREES<<1) + last, 3); |
| 979 | compress_block(s, (const ct_data *)static_ltree, | 979 | compress_block(s, (const ct_data *)static_ltree, |
| 980 | (const ct_data *)static_dtree); | 980 | (const ct_data *)static_dtree); |
| 981 | #ifdef ZLIB_DEBUG | 981 | #ifdef ZLIB_DEBUG |
| 982 | s->compressed_len += 3 + s->static_len; | 982 | s->compressed_len += 3 + s->static_len; |
| 983 | #endif | 983 | #endif |
| 984 | } else { | 984 | } else { |
| 985 | send_bits(s, (DYN_TREES<<1)+last, 3); | 985 | send_bits(s, (DYN_TREES<<1) + last, 3); |
| 986 | send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, | 986 | send_all_trees(s, s->l_desc.max_code + 1, s->d_desc.max_code + 1, |
| 987 | max_blindex+1); | 987 | max_blindex + 1); |
| 988 | compress_block(s, (const ct_data *)s->dyn_ltree, | 988 | compress_block(s, (const ct_data *)s->dyn_ltree, |
| 989 | (const ct_data *)s->dyn_dtree); | 989 | (const ct_data *)s->dyn_dtree); |
| 990 | #ifdef ZLIB_DEBUG | 990 | #ifdef ZLIB_DEBUG |
| @@ -1003,18 +1003,18 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last) | |||
| 1003 | s->compressed_len += 7; /* align on byte boundary */ | 1003 | s->compressed_len += 7; /* align on byte boundary */ |
| 1004 | #endif | 1004 | #endif |
| 1005 | } | 1005 | } |
| 1006 | Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, | 1006 | Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len >> 3, |
| 1007 | s->compressed_len-7*last)); | 1007 | s->compressed_len - 7*last)); |
| 1008 | } | 1008 | } |
| 1009 | 1009 | ||
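The `_tr_flush_block()` hunks above surround the decision among the three deflate block types: the stored payload plus 4 bytes of LEN/NLEN header is compared against the static-tree and optimal dynamic-tree costs, each rounded up from bits to bytes including the 3-bit block header. A simplified sketch of that comparison follows; it folds the static cost into `opt_lenb` the way the surrounding code does, but omits the `buf != NULL` guard and the `FORCE_STORED`/`FORCE_STATIC` debug overrides, and the cost inputs are hypothetical rather than real `deflate_state` fields:

```c
#include <stdio.h>

enum block_kind { STORED, STATIC, DYNAMIC };

/* opt_bits / static_bits are body costs in bits; stored_len is the raw
 * byte count of the block.  Mirrors the comparison order in the hunk
 * above: prefer stored, then static on a tie, otherwise dynamic. */
static enum block_kind choose_block(unsigned long opt_bits,
                                    unsigned long static_bits,
                                    unsigned long stored_len) {
    unsigned long opt_lenb    = (opt_bits + 3 + 7) >> 3;    /* bytes, incl. 3 header bits */
    unsigned long static_lenb = (static_bits + 3 + 7) >> 3;

    if (static_lenb <= opt_lenb)       /* static trees no worse: use their cost */
        opt_lenb = static_lenb;
    if (stored_len + 4 <= opt_lenb)    /* 4: two 16-bit words for LEN/NLEN */
        return STORED;
    if (static_lenb == opt_lenb)
        return STATIC;
    return DYNAMIC;
}

int main(void) {
    printf("%d\n", choose_block(8000, 8200, 900));    /* incompressible: stored wins */
    printf("%d\n", choose_block(52000, 51000, 8000)); /* static beats dynamic here */
    return 0;
}
```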
| 1010 | /* =========================================================================== | 1010 | /* =========================================================================== |
| 1011 | * Save the match info and tally the frequency counts. Return true if | 1011 | * Save the match info and tally the frequency counts. Return true if |
| 1012 | * the current block must be flushed. | 1012 | * the current block must be flushed. |
| 1013 | */ | 1013 | */ |
| 1014 | int ZLIB_INTERNAL _tr_tally (s, dist, lc) | 1014 | int ZLIB_INTERNAL _tr_tally(s, dist, lc) |
| 1015 | deflate_state *s; | 1015 | deflate_state *s; |
| 1016 | unsigned dist; /* distance of matched string */ | 1016 | unsigned dist; /* distance of matched string */ |
| 1017 | unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ | 1017 | unsigned lc; /* match length - MIN_MATCH or unmatched char (dist==0) */ |
| 1018 | { | 1018 | { |
| 1019 | s->sym_buf[s->sym_next++] = (uch)dist; | 1019 | s->sym_buf[s->sym_next++] = (uch)dist; |
| 1020 | s->sym_buf[s->sym_next++] = (uch)(dist >> 8); | 1020 | s->sym_buf[s->sym_next++] = (uch)(dist >> 8); |
| @@ -1030,7 +1030,7 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc) | |||
| 1030 | (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && | 1030 | (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && |
| 1031 | (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); | 1031 | (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); |
| 1032 | 1032 | ||
| 1033 | s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; | 1033 | s->dyn_ltree[_length_code[lc] + LITERALS + 1].Freq++; |
| 1034 | s->dyn_dtree[d_code(dist)].Freq++; | 1034 | s->dyn_dtree[d_code(dist)].Freq++; |
| 1035 | } | 1035 | } |
| 1036 | return (s->sym_next == s->sym_end); | 1036 | return (s->sym_next == s->sym_end); |
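As the hunk shows, `_tr_tally()` appends each literal or match to `sym_buf` as three bytes, distance low byte, distance high byte, then the literal/length byte, bumps the corresponding frequency counters, and reports when the buffer is full; a distance of 0 marks a plain literal. A minimal standalone sketch of that 3-byte-per-symbol buffering (struct, capacity, and names are illustrative only, and only literal frequencies are counted here):

```c
#include <stdio.h>
#include <string.h>

#define SYM_CAP (3 * 1024)   /* illustrative capacity, not LIT_BUFSIZE */

struct tally {
    unsigned char sym_buf[SYM_CAP];
    unsigned sym_next;                 /* next free byte in sym_buf */
    unsigned lit_freq[256];            /* literal frequencies only */
};

/* Record one symbol: dist == 0 means "literal lc", otherwise a match of
 * length lc + MIN_MATCH at distance dist.  Returns 1 when the buffer is
 * full and the block should be flushed. */
static int tally(struct tally *t, unsigned dist, unsigned lc) {
    t->sym_buf[t->sym_next++] = (unsigned char)dist;         /* dist low  */
    t->sym_buf[t->sym_next++] = (unsigned char)(dist >> 8);  /* dist high */
    t->sym_buf[t->sym_next++] = (unsigned char)lc;           /* literal or length - MIN_MATCH */
    if (dist == 0)
        t->lit_freq[lc]++;   /* the real code also counts length and distance codes */
    return t->sym_next == SYM_CAP;
}

int main(void) {
    struct tally t;
    memset(&t, 0, sizeof t);
    tally(&t, 0, 'z');       /* literal 'z' */
    tally(&t, 4, 5);         /* match: length 5 + MIN_MATCH at distance 4 */
    printf("symbols buffered: %u bytes\n", t.sym_next);
    return 0;
}
```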
| @@ -1060,7 +1060,7 @@ local void compress_block(s, ltree, dtree) | |||
| 1060 | } else { | 1060 | } else { |
| 1061 | /* Here, lc is the match length - MIN_MATCH */ | 1061 | /* Here, lc is the match length - MIN_MATCH */ |
| 1062 | code = _length_code[lc]; | 1062 | code = _length_code[lc]; |
| 1063 | send_code(s, code+LITERALS+1, ltree); /* send the length code */ | 1063 | send_code(s, code + LITERALS + 1, ltree); /* send length code */ |
| 1064 | extra = extra_lbits[code]; | 1064 | extra = extra_lbits[code]; |
| 1065 | if (extra != 0) { | 1065 | if (extra != 0) { |
| 1066 | lc -= base_length[code]; | 1066 | lc -= base_length[code]; |
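In the `compress_block()` hunk, a match length is sent as a length code from the literal/length tree (`code + LITERALS + 1`) followed by `extra_lbits[code]` extra bits holding the offset from `base_length[code]`. The tables below are the fixed length-code layout from RFC 1951, section 3.2.5; the lookup helper is illustrative and not zlib's `send_code()` path:

```c
#include <stdio.h>

/* Deflate length codes 257..285: base match length and number of extra
 * bits per code (MIN_MATCH is 3). */
static const unsigned short base_len[29] = {
    3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
    35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258
};
static const unsigned char extra_bits[29] = {
    0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
    3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0
};

/* For a match length 3..258, print the literal/length code to emit and
 * the extra bits that follow it. */
static void explain_length(unsigned len) {
    for (int code = 28; code >= 0; code--) {
        if (len >= base_len[code]) {
            printf("length %u -> code %d, %u extra bit(s), value %u\n",
                   len, 257 + code, extra_bits[code], len - base_len[code]);
            return;
        }
    }
}

int main(void) {
    explain_length(3);    /* code 257, no extra bits          */
    explain_length(100);  /* code 279, 4 extra bits, value 1  */
    explain_length(258);  /* code 285, no extra bits          */
    return 0;
}
```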
| @@ -1176,6 +1176,6 @@ local void bi_windup(s) | |||
| 1176 | s->bi_buf = 0; | 1176 | s->bi_buf = 0; |
| 1177 | s->bi_valid = 0; | 1177 | s->bi_valid = 0; |
| 1178 | #ifdef ZLIB_DEBUG | 1178 | #ifdef ZLIB_DEBUG |
| 1179 | s->bits_sent = (s->bits_sent+7) & ~7; | 1179 | s->bits_sent = (s->bits_sent + 7) & ~7; |
| 1180 | #endif | 1180 | #endif |
| 1181 | } | 1181 | } |
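The `bi_windup()` hunk rounds the debug counter up to the next byte boundary with `(bits_sent + 7) & ~7` after flushing any partial byte in the bit buffer. A minimal bit-writer sketch of the same idea, assuming `bi_valid + length` never exceeds 16 (the real `send_bits()` splits the value when it would):

```c
#include <stdint.h>
#include <stdio.h>

/* Minimal LSB-first bit writer: bits accumulate in bi_buf, bi_valid counts
 * how many are pending, windup() flushes the partial byte and realigns. */
struct bitwriter {
    uint16_t bi_buf;
    int bi_valid;
    unsigned long bits_sent;   /* debug counter, rounded up on windup */
};

static void send_bits(struct bitwriter *w, unsigned value, int length) {
    w->bi_buf |= (uint16_t)(value << w->bi_valid);   /* assumes no 16-bit overflow */
    w->bi_valid += length;
    w->bits_sent += (unsigned long)length;
    while (w->bi_valid >= 8) {                       /* emit completed bytes */
        printf("byte: 0x%02x\n", w->bi_buf & 0xff);
        w->bi_buf >>= 8;
        w->bi_valid -= 8;
    }
}

static void windup(struct bitwriter *w) {
    if (w->bi_valid > 0)                             /* flush the partial byte */
        printf("byte: 0x%02x\n", w->bi_buf & 0xff);
    w->bi_buf = 0;
    w->bi_valid = 0;
    w->bits_sent = (w->bits_sent + 7) & ~7UL;        /* round up to byte boundary */
}

int main(void) {
    struct bitwriter w = {0, 0, 0};
    send_bits(&w, 0x5, 3);   /* 3 bits pending */
    send_bits(&w, 0x1, 2);   /* 5 bits pending */
    windup(&w);              /* emits one padded byte; bits_sent becomes 8 */
    printf("bits_sent = %lu\n", w.bits_sent);
    return 0;
}
```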
| @@ -24,7 +24,7 @@ | |||
| 24 | Z_DATA_ERROR if the input data was corrupted, including if the input data is | 24 | Z_DATA_ERROR if the input data was corrupted, including if the input data is |
| 25 | an incomplete zlib stream. | 25 | an incomplete zlib stream. |
| 26 | */ | 26 | */ |
| 27 | int ZEXPORT uncompress2 (dest, destLen, source, sourceLen) | 27 | int ZEXPORT uncompress2(dest, destLen, source, sourceLen) |
| 28 | Bytef *dest; | 28 | Bytef *dest; |
| 29 | uLongf *destLen; | 29 | uLongf *destLen; |
| 30 | const Bytef *source; | 30 | const Bytef *source; |
| @@ -83,7 +83,7 @@ int ZEXPORT uncompress2 (dest, destLen, source, sourceLen) | |||
| 83 | err; | 83 | err; |
| 84 | } | 84 | } |
| 85 | 85 | ||
| 86 | int ZEXPORT uncompress (dest, destLen, source, sourceLen) | 86 | int ZEXPORT uncompress(dest, destLen, source, sourceLen) |
| 87 | Bytef *dest; | 87 | Bytef *dest; |
| 88 | uLongf *destLen; | 88 | uLongf *destLen; |
| 89 | const Bytef *source; | 89 | const Bytef *source; |
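The uncompr.c hunks above only adjust spacing in the `uncompress2()`/`uncompress()` signatures. For reference, a minimal round-trip through the documented API, with buffer sizes chosen ad hoc and error handling kept to a bare minimum:

```c
#include <stdio.h>
#include <string.h>
#include <zlib.h>

int main(void) {
    const Bytef src[] = "hello, hello, hello, hello, zlib!";
    uLong srcLen = (uLong)sizeof(src);

    Bytef comp[128];
    uLongf compLen = sizeof(comp);            /* in: capacity, out: bytes used */
    if (compress(comp, &compLen, src, srcLen) != Z_OK)
        return 1;

    Bytef out[128];
    uLongf outLen = sizeof(out);
    uLong consumed = compLen;                 /* uncompress2 reports bytes read here */
    if (uncompress2(out, &outLen, comp, &consumed) != Z_OK)
        return 1;

    printf("%lu -> %lu -> %lu bytes, match: %s\n",
           srcLen, compLen, outLen,
           (outLen == srcLen && memcmp(out, src, srcLen) == 0) ? "yes" : "no");
    return 0;
}
```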
| @@ -1913,7 +1913,7 @@ ZEXTERN int ZEXPORT inflateSyncPoint OF((z_streamp)); | |||
| 1913 | ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void)); | 1913 | ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void)); |
| 1914 | ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int)); | 1914 | ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int)); |
| 1915 | ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int)); | 1915 | ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int)); |
| 1916 | ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF ((z_streamp)); | 1916 | ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF((z_streamp)); |
| 1917 | ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp)); | 1917 | ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp)); |
| 1918 | ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp)); | 1918 | ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp)); |
| 1919 | #if defined(_WIN32) && !defined(Z_SOLO) | 1919 | #if defined(_WIN32) && !defined(Z_SOLO) |
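The zlib.h hunk only removes a stray space before `OF((z_streamp))`. The double parentheses in these prototypes come from zlib's `OF()` macro, which keeps the headers usable on pre-ANSI compilers by dropping the parameter list when `STDC` is not defined; its definition in zconf.h is essentially:

```c
#ifndef OF /* function prototypes */
#  ifdef STDC
#    define OF(args)  args
#  else
#    define OF(args)  ()
#  endif
#endif
```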
| @@ -119,7 +119,7 @@ uLong ZEXPORT zlibCompileFlags() | |||
| 119 | # endif | 119 | # endif |
| 120 | int ZLIB_INTERNAL z_verbose = verbose; | 120 | int ZLIB_INTERNAL z_verbose = verbose; |
| 121 | 121 | ||
| 122 | void ZLIB_INTERNAL z_error (m) | 122 | void ZLIB_INTERNAL z_error(m) |
| 123 | char *m; | 123 | char *m; |
| 124 | { | 124 | { |
| 125 | fprintf(stderr, "%s\n", m); | 125 | fprintf(stderr, "%s\n", m); |
| @@ -214,7 +214,7 @@ local ptr_table table[MAX_PTR]; | |||
| 214 | * a protected system like OS/2. Use Microsoft C instead. | 214 | * a protected system like OS/2. Use Microsoft C instead. |
| 215 | */ | 215 | */ |
| 216 | 216 | ||
| 217 | voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size) | 217 | voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size) |
| 218 | { | 218 | { |
| 219 | voidpf buf; | 219 | voidpf buf; |
| 220 | ulg bsize = (ulg)items*size; | 220 | ulg bsize = (ulg)items*size; |
| @@ -240,7 +240,7 @@ voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size) | |||
| 240 | return buf; | 240 | return buf; |
| 241 | } | 241 | } |
| 242 | 242 | ||
| 243 | void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr) | 243 | void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) |
| 244 | { | 244 | { |
| 245 | int n; | 245 | int n; |
| 246 | 246 | ||
| @@ -277,13 +277,13 @@ void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr) | |||
| 277 | # define _hfree hfree | 277 | # define _hfree hfree |
| 278 | #endif | 278 | #endif |
| 279 | 279 | ||
| 280 | voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size) | 280 | voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size) |
| 281 | { | 281 | { |
| 282 | (void)opaque; | 282 | (void)opaque; |
| 283 | return _halloc((long)items, size); | 283 | return _halloc((long)items, size); |
| 284 | } | 284 | } |
| 285 | 285 | ||
| 286 | void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr) | 286 | void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) |
| 287 | { | 287 | { |
| 288 | (void)opaque; | 288 | (void)opaque; |
| 289 | _hfree(ptr); | 289 | _hfree(ptr); |
| @@ -302,7 +302,7 @@ extern voidp calloc OF((uInt items, uInt size)); | |||
| 302 | extern void free OF((voidpf ptr)); | 302 | extern void free OF((voidpf ptr)); |
| 303 | #endif | 303 | #endif |
| 304 | 304 | ||
| 305 | voidpf ZLIB_INTERNAL zcalloc (opaque, items, size) | 305 | voidpf ZLIB_INTERNAL zcalloc(opaque, items, size) |
| 306 | voidpf opaque; | 306 | voidpf opaque; |
| 307 | unsigned items; | 307 | unsigned items; |
| 308 | unsigned size; | 308 | unsigned size; |
| @@ -312,7 +312,7 @@ voidpf ZLIB_INTERNAL zcalloc (opaque, items, size) | |||
| 312 | (voidpf)calloc(items, size); | 312 | (voidpf)calloc(items, size); |
| 313 | } | 313 | } |
| 314 | 314 | ||
| 315 | void ZLIB_INTERNAL zcfree (opaque, ptr) | 315 | void ZLIB_INTERNAL zcfree(opaque, ptr) |
| 316 | voidpf opaque; | 316 | voidpf opaque; |
| 317 | voidpf ptr; | 317 | voidpf ptr; |
| 318 | { | 318 | { |
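The `zcalloc()`/`zcfree()` pairs touched above are the fallbacks zlib installs when a stream is initialized with `zalloc`/`zfree` left as `Z_NULL`; an application can supply its own pair instead. A small sketch using the `alloc_func`/`free_func` signatures from zlib.h, where the counting wrapper and its names are illustrative only:

```c
#include <stdio.h>
#include <stdlib.h>
#include <zlib.h>

/* Custom allocators with the alloc_func/free_func signatures from zlib.h.
 * They simply wrap calloc/free and count calls via the opaque pointer. */
static voidpf my_alloc(voidpf opaque, uInt items, uInt size) {
    (*(unsigned *)opaque)++;
    return (voidpf)calloc(items, size);
}

static void my_free(voidpf opaque, voidpf ptr) {
    (void)opaque;
    free(ptr);
}

int main(void) {
    unsigned allocs = 0;
    z_stream strm;
    strm.zalloc = my_alloc;     /* leave these Z_NULL to get zcalloc/zcfree */
    strm.zfree  = my_free;
    strm.opaque = (voidpf)&allocs;

    if (deflateInit(&strm, Z_DEFAULT_COMPRESSION) != Z_OK)
        return 1;
    deflateEnd(&strm);
    printf("allocation calls during deflateInit: %u\n", allocs);
    return 0;
}
```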
