author     Denys Vlasenko <vda.linux@googlemail.com>	2009-09-12 17:57:19 +0200
committer  Denys Vlasenko <vda.linux@googlemail.com>	2009-09-12 17:57:19 +0200
commit     d7686c8c2c849c775007c5de19901ab6b38bd039 (patch)
tree       8d4017407166a4f16a0fecd7921a8d03975636bf /arch
parent     e7aa0d9eca180f77ed4226ecaa1e8961d842add7 (diff)
Apply post-1.15.0 fixes; bump version to 1.15.1
Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
Diffstat (limited to '')
-rw-r--r--  archival/Config.in                         |   4
-rw-r--r--  archival/libunarchive/data_extract_all.c   |   2
-rw-r--r--  archival/libunarchive/decompress_unlzma.c  | 154
3 files changed, 99 insertions, 61 deletions
diff --git a/archival/Config.in b/archival/Config.in
index cae7f20bb..71b953819 100644
--- a/archival/Config.in
+++ b/archival/Config.in
@@ -298,8 +298,8 @@ config FEATURE_LZMA_FAST
 	default n
 	depends on UNLZMA
 	help
-	  This option reduces decompression time by about 25% at the cost of
-	  a 1K bigger binary.
+	  This option reduces decompression time by about 33% at the cost of
+	  a 2K bigger binary.
 
 config UNZIP
 	bool "unzip"
diff --git a/archival/libunarchive/data_extract_all.c b/archival/libunarchive/data_extract_all.c
index 123d1de74..d79ef7cb9 100644
--- a/archival/libunarchive/data_extract_all.c
+++ b/archival/libunarchive/data_extract_all.c
@@ -132,7 +132,7 @@ void FAST_FUNC data_extract_all(archive_handle_t *archive_handle)
 #endif
 		lchown(file_header->name, file_header->uid, file_header->gid);
 	}
-	if (S_ISLNK(file_header->mode)) {
+	if (!S_ISLNK(file_header->mode)) {
 		/* uclibc has no lchmod, glibc is even stranger -
 		 * it has lchmod which seems to do nothing!
 		 * so we use chmod... */
diff --git a/archival/libunarchive/decompress_unlzma.c b/archival/libunarchive/decompress_unlzma.c
index 68085d68c..33e5cd65d 100644
--- a/archival/libunarchive/decompress_unlzma.c
+++ b/archival/libunarchive/decompress_unlzma.c
@@ -8,15 +8,14 @@
  *
  * Licensed under GPLv2 or later, see file LICENSE in this tarball for details.
  */
+
 #include "libbb.h"
 #include "unarchive.h"
 
 #if ENABLE_FEATURE_LZMA_FAST
 # define speed_inline ALWAYS_INLINE
-# define size_inline
 #else
 # define speed_inline
-# define size_inline ALWAYS_INLINE
 #endif
 
 
@@ -45,8 +44,8 @@ typedef struct {
 #define RC_MODEL_TOTAL_BITS 11
 
 
-/* Called twice: once at startup (LZMA_FAST only) and once in rc_normalize() */
-static size_inline void rc_read(rc_t *rc)
+/* Called twice: once at startup and once in rc_normalize() */
+static void rc_read(rc_t *rc)
 {
 	int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
 	if (buffer_size <= 0)
@@ -55,17 +54,8 @@ static size_inline void rc_read(rc_t *rc)
 	rc->buffer_end = RC_BUFFER + buffer_size;
 }
 
-/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
-static void rc_do_normalize(rc_t *rc)
-{
-	if (rc->ptr >= rc->buffer_end)
-		rc_read(rc);
-	rc->range <<= 8;
-	rc->code = (rc->code << 8) | *rc->ptr++;
-}
-
 /* Called once */
-static ALWAYS_INLINE rc_t* rc_init(int fd) /*, int buffer_size) */
+static rc_t* rc_init(int fd) /*, int buffer_size) */
 {
 	int i;
 	rc_t *rc;
@@ -73,18 +63,17 @@ static ALWAYS_INLINE rc_t* rc_init(int fd) /*, int buffer_size) */
 	rc = xmalloc(sizeof(*rc) + RC_BUFFER_SIZE);
 
 	rc->fd = fd;
+	/* rc->buffer_size = buffer_size; */
+	rc->buffer_end = RC_BUFFER + RC_BUFFER_SIZE;
 	rc->ptr = rc->buffer_end;
 
+	rc->code = 0;
+	rc->range = 0xFFFFFFFF;
 	for (i = 0; i < 5; i++) {
-#if ENABLE_FEATURE_LZMA_FAST
 		if (rc->ptr >= rc->buffer_end)
 			rc_read(rc);
 		rc->code = (rc->code << 8) | *rc->ptr++;
-#else
-		rc_do_normalize(rc);
-#endif
 	}
-	rc->range = 0xFFFFFFFF;
 	return rc;
 }
 
@@ -94,6 +83,14 @@ static ALWAYS_INLINE void rc_free(rc_t *rc)
 	free(rc);
 }
 
+/* Called twice, but one callsite is in speed_inline'd rc_is_bit_0_helper() */
+static void rc_do_normalize(rc_t *rc)
+{
+	if (rc->ptr >= rc->buffer_end)
+		rc_read(rc);
+	rc->range <<= 8;
+	rc->code = (rc->code << 8) | *rc->ptr++;
+}
 static ALWAYS_INLINE void rc_normalize(rc_t *rc)
 {
 	if (rc->range < (1 << RC_TOP_BITS)) {
@@ -101,28 +98,49 @@ static ALWAYS_INLINE void rc_normalize(rc_t *rc)
 	}
 }
 
-/* rc_is_bit_1 is called 9 times */
-static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
+/* rc_is_bit_0 is called 9 times */
+/* Why rc_is_bit_0_helper exists?
+ * Because we want to always expose (rc->code < rc->bound) to optimizer.
+ * Thus rc_is_bit_0 is always inlined, and rc_is_bit_0_helper is inlined
+ * only if we compile for speed.
+ */
+static speed_inline uint32_t rc_is_bit_0_helper(rc_t *rc, uint16_t *p)
 {
 	rc_normalize(rc);
 	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
-	if (rc->code < rc->bound) {
-		rc->range = rc->bound;
-		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
-		return 0;
-	}
+	return rc->bound;
+}
+static ALWAYS_INLINE int rc_is_bit_0(rc_t *rc, uint16_t *p)
+{
+	uint32_t t = rc_is_bit_0_helper(rc, p);
+	return rc->code < t;
+}
+
+/* Called ~10 times, but very small, thus inlined */
+static speed_inline void rc_update_bit_0(rc_t *rc, uint16_t *p)
+{
+	rc->range = rc->bound;
+	*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
+}
+static speed_inline void rc_update_bit_1(rc_t *rc, uint16_t *p)
+{
 	rc->range -= rc->bound;
 	rc->code -= rc->bound;
 	*p -= *p >> RC_MOVE_BITS;
-	return 1;
 }
 
 /* Called 4 times in unlzma loop */
-static speed_inline int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
+static int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
 {
-	int ret = rc_is_bit_1(rc, p);
-	*symbol = *symbol * 2 + ret;
-	return ret;
+	if (rc_is_bit_0(rc, p)) {
+		rc_update_bit_0(rc, p);
+		*symbol *= 2;
+		return 0;
+	} else {
+		rc_update_bit_1(rc, p);
+		*symbol = *symbol * 2 + 1;
+		return 1;
+	}
 }
 
 /* Called once */
@@ -248,13 +266,13 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 	header.dst_size = SWAP_LE64(header.dst_size);
 
 	if (header.dict_size == 0)
-		header.dict_size++;
+		header.dict_size = 1;
 
 	buffer = xmalloc(MIN(header.dst_size, header.dict_size));
 
 	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
 	p = xmalloc(num_probs * sizeof(*p));
-	num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
+	num_probs = LZMA_LITERAL + (LZMA_LIT_SIZE << (lc + lp));
 	for (i = 0; i < num_probs; i++)
 		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;
 
@@ -264,8 +282,9 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 		int pos_state = (buffer_pos + global_pos) & pos_state_mask;
 
 		prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;
-		if (!rc_is_bit_1(rc, prob)) {
+		if (rc_is_bit_0(rc, prob)) {
 			mi = 1;
+			rc_update_bit_0(rc, prob);
 			prob = (p + LZMA_LITERAL
 			        + (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
 			        + (previous_byte >> (8 - lc))
@@ -321,21 +340,27 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 			int offset;
 			uint16_t *prob_len;
 
+			rc_update_bit_1(rc, prob);
 			prob = p + LZMA_IS_REP + state;
-			if (!rc_is_bit_1(rc, prob)) {
+			if (rc_is_bit_0(rc, prob)) {
+				rc_update_bit_0(rc, prob);
 				rep3 = rep2;
 				rep2 = rep1;
 				rep1 = rep0;
 				state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
 				prob = p + LZMA_LEN_CODER;
 			} else {
-				prob += LZMA_IS_REP_G0 - LZMA_IS_REP;
-				if (!rc_is_bit_1(rc, prob)) {
+				rc_update_bit_1(rc, prob);
+				prob = p + LZMA_IS_REP_G0 + state;
+				if (rc_is_bit_0(rc, prob)) {
+					rc_update_bit_0(rc, prob);
 					prob = (p + LZMA_IS_REP_0_LONG
 						+ (state << LZMA_NUM_POS_BITS_MAX)
 						+ pos_state
 					);
-					if (!rc_is_bit_1(rc, prob)) {
+					if (rc_is_bit_0(rc, prob)) {
+						rc_update_bit_0(rc, prob);
+
 						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
 #if ENABLE_FEATURE_LZMA_FAST
 						pos = buffer_pos - rep0;
@@ -347,16 +372,25 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 						len = 1;
 						goto string;
 #endif
+					} else {
+						rc_update_bit_1(rc, prob);
 					}
 				} else {
 					uint32_t distance;
 
-					prob += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
-					distance = rep1;
-					if (rc_is_bit_1(rc, prob)) {
-						prob += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
-						distance = rep2;
-						if (rc_is_bit_1(rc, prob)) {
+					rc_update_bit_1(rc, prob);
+					prob = p + LZMA_IS_REP_G1 + state;
+					if (rc_is_bit_0(rc, prob)) {
+						rc_update_bit_0(rc, prob);
+						distance = rep1;
+					} else {
+						rc_update_bit_1(rc, prob);
+						prob = p + LZMA_IS_REP_G2 + state;
+						if (rc_is_bit_0(rc, prob)) {
+							rc_update_bit_0(rc, prob);
+							distance = rep2;
+						} else {
+							rc_update_bit_1(rc, prob);
 							distance = rep3;
 							rep3 = rep2;
 						}
@@ -370,20 +404,24 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 				}
 
 				prob_len = prob + LZMA_LEN_CHOICE;
-				if (!rc_is_bit_1(rc, prob_len)) {
-					prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
-						+ (pos_state << LZMA_LEN_NUM_LOW_BITS);
+				if (rc_is_bit_0(rc, prob_len)) {
+					rc_update_bit_0(rc, prob_len);
+					prob_len = (prob + LZMA_LEN_LOW
+						+ (pos_state << LZMA_LEN_NUM_LOW_BITS));
 					offset = 0;
 					num_bits = LZMA_LEN_NUM_LOW_BITS;
 				} else {
-					prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
-					if (!rc_is_bit_1(rc, prob_len)) {
-						prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
-							+ (pos_state << LZMA_LEN_NUM_MID_BITS);
+					rc_update_bit_1(rc, prob_len);
+					prob_len = prob + LZMA_LEN_CHOICE_2;
+					if (rc_is_bit_0(rc, prob_len)) {
+						rc_update_bit_0(rc, prob_len);
+						prob_len = (prob + LZMA_LEN_MID
+							+ (pos_state << LZMA_LEN_NUM_MID_BITS));
 						offset = 1 << LZMA_LEN_NUM_LOW_BITS;
 						num_bits = LZMA_LEN_NUM_MID_BITS;
 					} else {
-						prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
+						rc_update_bit_1(rc, prob_len);
+						prob_len = prob + LZMA_LEN_HIGH;
 						offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
 							+ (1 << LZMA_LEN_NUM_MID_BITS));
 						num_bits = LZMA_LEN_NUM_HIGH_BITS;
@@ -400,20 +438,19 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 					((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
 					  LZMA_NUM_LEN_TO_POS_STATES - 1)
 					  << LZMA_NUM_POS_SLOT_BITS);
-				rc_bit_tree_decode(rc, prob,
-						LZMA_NUM_POS_SLOT_BITS, &pos_slot);
-				rep0 = pos_slot;
+				rc_bit_tree_decode(rc, prob, LZMA_NUM_POS_SLOT_BITS,
+						&pos_slot);
 				if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
 					num_bits = (pos_slot >> 1) - 1;
 					rep0 = 2 | (pos_slot & 1);
-					prob = p + LZMA_ALIGN;
 					if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
 						rep0 <<= num_bits;
-						prob += LZMA_SPEC_POS - LZMA_ALIGN - 1 + rep0 - pos_slot;
+						prob = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
 					} else {
 						num_bits -= LZMA_NUM_ALIGN_BITS;
 						while (num_bits--)
 							rep0 = (rep0 << 1) | rc_direct_bit(rc);
+						prob = p + LZMA_ALIGN;
 						rep0 <<= LZMA_NUM_ALIGN_BITS;
 						num_bits = LZMA_NUM_ALIGN_BITS;
 					}
@@ -424,7 +461,8 @@ unpack_lzma_stream(int src_fd, int dst_fd)
 						rep0 |= i;
 						i <<= 1;
 					}
-				}
+				} else
+					rep0 = pos_slot;
 				if (++rep0 == 0)
 					break;
 			}
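
For readers skimming the hunks above: the following is a minimal standalone sketch (an editor's illustration, not part of this commit) of the binary range-decoder step that both the removed rc_is_bit_1() and the restored rc_is_bit_0()/rc_update_bit_0()/rc_update_bit_1() helpers implement. The constants follow the RC_MODEL_TOTAL_BITS/RC_MOVE_BITS scheme in decompress_unlzma.c; the rc_t here is a simplified stand-in, and the buffer refill/normalization done by rc_normalize()/rc_read() in the real code is omitted.

#include <stdint.h>

#define RC_MODEL_TOTAL_BITS 11   /* probabilities are 11-bit fixed point */
#define RC_MOVE_BITS        5    /* adaptation speed of the bit model */

typedef struct {
	uint32_t range;   /* current coding range (kept normalized in real code) */
	uint32_t code;    /* bits read from the compressed stream */
	uint32_t bound;   /* split point computed from *p */
} rc_t;

/* Decode one bit: split the range in proportion to the probability *p,
 * pick the side the code falls into, then nudge *p toward the observed bit. */
static int rc_decode_bit(rc_t *rc, uint16_t *p)
{
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	if (rc->code < rc->bound) {
		/* bit is 0: keep the lower part of the range, raise *p */
		rc->range = rc->bound;
		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
		return 0;
	}
	/* bit is 1: keep the upper part of the range, lower *p */
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
	return 1;
}

The removed trunk code folded this whole decision into rc_is_bit_1(); the 1.15-branch code restored here splits it into a predicate (rc_is_bit_0) plus explicit model updates (rc_update_bit_0/rc_update_bit_1) at each call site, which is what most of the mechanical churn in the unpack_lzma_stream() hunks reflects.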