• Home
  • History
  • Annotate
  • Line#
  • Navigate
  • Raw
  • Download
  • only in /asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt/router/busybox/archival/libunarchive/
/* vi: set sw=4 ts=4: */
/*
 * Small lzma deflate implementation.
 * Copyright (C) 2006  Aurelien Jacobs <aurel@gnuage.org>
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005  Igor Pavlov
 *
 * Licensed under GPLv2 or later, see file LICENSE in this tarball for details.
 */
11#include "libbb.h"
12#include "unarchive.h"
13
/* Choose which range-coder helpers are force-inlined:
 * LZMA_FAST inlines the hot per-bit decoders (speed_inline),
 * otherwise only the rare buffer-refill path (size_inline) is inlined
 * to keep code size down. */
#if ENABLE_FEATURE_LZMA_FAST
#  define speed_inline ALWAYS_INLINE
#  define size_inline
#else
#  define speed_inline
#  define size_inline ALWAYS_INLINE
#endif
21
22
/* Range-coder state. The input buffer is carved out of the same
 * allocation, immediately after this struct (see rc_init / RC_BUFFER). */
typedef struct {
	int fd;            /* input file descriptor */
	uint8_t *ptr;      /* current read position inside the input buffer */

/* Was keeping rc on stack in unlzma and separately allocating buffer,
 * but with "buffer 'attached to' allocated rc" code is smaller: */
	/* uint8_t *buffer; */
/* NB: expands using the *local* variable named 'rc' at each use site */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end; /* one past the last valid byte in the buffer */

/* Had provisions for variable buffer, but we don't need it here */
	/* int buffer_size; */
#define RC_BUFFER_SIZE 0x10000

	uint32_t code;   /* code value currently being decoded */
	uint32_t range;  /* current decoder range */
	uint32_t bound;  /* scratch: threshold computed by rc_is_bit_1 */
} rc_t;
42
/* Range-coder tuning constants (as in the LZMA SDK) */
#define RC_TOP_BITS 24          /* renormalize when range < 2^24 */
#define RC_MOVE_BITS 5          /* probability adaptation shift */
#define RC_MODEL_TOTAL_BITS 11  /* probabilities are 11-bit fixed point */
46
47
48/* Called twice: once at startup (LZMA_FAST only) and once in rc_normalize() */
49static size_inline void rc_read(rc_t *rc)
50{
51	int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
52//TODO: return -1 instead
53//This will make unlzma delete broken unpacked file on unpack errors
54	if (buffer_size <= 0)
55		bb_error_msg_and_die("unexpected EOF");
56	rc->ptr = RC_BUFFER;
57	rc->buffer_end = RC_BUFFER + buffer_size;
58}
59
60/* Called twice, but one callsite is in speed_inline'd rc_is_bit_1() */
61static void rc_do_normalize(rc_t *rc)
62{
63	if (rc->ptr >= rc->buffer_end)
64		rc_read(rc);
65	rc->range <<= 8;
66	rc->code = (rc->code << 8) | *rc->ptr++;
67}
68
/* Called once. Allocates decoder state plus its input buffer in a single
 * chunk and primes 'code' with the first 5 bytes of the stream. */
static ALWAYS_INLINE rc_t* rc_init(int fd) /*, int buffer_size) */
{
	int i;
	rc_t *rc;

	/* xzalloc zero-fills: ptr == buffer_end == NULL, so the very first
	 * "ptr >= buffer_end" test below (or rc_do_normalize) forces a read */
	rc = xzalloc(sizeof(*rc) + RC_BUFFER_SIZE);

	rc->fd = fd;
	/* rc->ptr = rc->buffer_end; */

	for (i = 0; i < 5; i++) {
#if ENABLE_FEATURE_LZMA_FAST
		/* open-coded byte fetch: avoids the range shift rc_do_normalize does */
		if (rc->ptr >= rc->buffer_end)
			rc_read(rc);
		rc->code = (rc->code << 8) | *rc->ptr++;
#else
		rc_do_normalize(rc);
#endif
	}
	/* range set after the loop, so the shifts above can't overflow it */
	rc->range = 0xFFFFFFFF;
	return rc;
}
92
93/* Called once  */
94static ALWAYS_INLINE void rc_free(rc_t *rc)
95{
96	free(rc);
97}
98
99static ALWAYS_INLINE void rc_normalize(rc_t *rc)
100{
101	if (rc->range < (1 << RC_TOP_BITS)) {
102		rc_do_normalize(rc);
103	}
104}
105
106/* rc_is_bit_1 is called 9 times */
107static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
108{
109	rc_normalize(rc);
110	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
111	if (rc->code < rc->bound) {
112		rc->range = rc->bound;
113		*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
114		return 0;
115	}
116	rc->range -= rc->bound;
117	rc->code -= rc->bound;
118	*p -= *p >> RC_MOVE_BITS;
119	return 1;
120}
121
122/* Called 4 times in unlzma loop */
123static speed_inline int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
124{
125	int ret = rc_is_bit_1(rc, p);
126	*symbol = *symbol * 2 + ret;
127	return ret;
128}
129
130/* Called once */
131static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
132{
133	rc_normalize(rc);
134	rc->range >>= 1;
135	if (rc->code >= rc->range) {
136		rc->code -= rc->range;
137		return 1;
138	}
139	return 0;
140}
141
142/* Called twice */
143static speed_inline void
144rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
145{
146	int i = num_levels;
147
148	*symbol = 1;
149	while (i--)
150		rc_get_bit(rc, p + *symbol, symbol);
151	*symbol -= 1 << num_levels;
152}
153
154
/* 13-byte on-disk LZMA file header (packed; multi-byte fields little-endian,
 * byte-swapped after reading in unpack_lzma_stream) */
typedef struct {
	uint8_t pos;        /* properties byte: lc + 9*(lp + 5*pb), must be < 225 */
	uint32_t dict_size; /* dictionary size */
	uint64_t dst_size;  /* uncompressed (destination) size */
} PACKED lzma_header_t;
160
161
/* #defines will force compiler to compute/optimize each one with each usage.
 * Have heart and use enum instead. */
enum {
	LZMA_BASE_SIZE = 1846,  /* probability slots before the literal coders */
	LZMA_LIT_SIZE  = 768,   /* slots per literal coder (3 * 256) */

	LZMA_NUM_POS_BITS_MAX = 4,

	/* bit widths of the three length sub-coders */
	LZMA_LEN_NUM_LOW_BITS  = 3,
	LZMA_LEN_NUM_MID_BITS  = 3,
	LZMA_LEN_NUM_HIGH_BITS = 8,

	/* offsets within one length coder's probability block */
	LZMA_LEN_CHOICE     = 0,
	LZMA_LEN_CHOICE_2   = (LZMA_LEN_CHOICE + 1),
	LZMA_LEN_LOW        = (LZMA_LEN_CHOICE_2 + 1),
	LZMA_LEN_MID        = (LZMA_LEN_LOW \
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
	LZMA_LEN_HIGH       = (LZMA_LEN_MID \
	                      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
	LZMA_NUM_LEN_PROBS  = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),

	LZMA_NUM_STATES     = 12, /* states of the literal/match state machine */
	LZMA_NUM_LIT_STATES = 7,  /* states 0..6 follow a literal */

	LZMA_START_POS_MODEL_INDEX = 4,
	LZMA_END_POS_MODEL_INDEX   = 14,
	LZMA_NUM_FULL_DISTANCES    = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),

	LZMA_NUM_POS_SLOT_BITS = 6,
	LZMA_NUM_LEN_TO_POS_STATES = 4,

	LZMA_NUM_ALIGN_BITS = 4,

	LZMA_MATCH_MIN_LEN  = 2, /* decoded lengths are biased by this */

	/* offsets of the probability groups inside the single array 'p' */
	LZMA_IS_MATCH       = 0,
	LZMA_IS_REP         = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_IS_REP_G0      = (LZMA_IS_REP + LZMA_NUM_STATES),
	LZMA_IS_REP_G1      = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
	LZMA_IS_REP_G2      = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
	LZMA_IS_REP_0_LONG  = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
	LZMA_POS_SLOT       = (LZMA_IS_REP_0_LONG \
	                      + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_SPEC_POS       = (LZMA_POS_SLOT \
	                      + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
	LZMA_ALIGN          = (LZMA_SPEC_POS \
	                      + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
	LZMA_LEN_CODER      = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
	LZMA_REP_LEN_CODER  = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
	LZMA_LITERAL        = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
};
213
214
/* Decode an LZMA stream from src_fd to dst_fd.
 * Returns bytes written (long long when DESKTOP), or -1 on bad header
 * or short write. NOTE: truncated *input* does not return -1 — rc_read()
 * calls bb_error_msg_and_die (see TODO there). */
IF_DESKTOP(long long) int FAST_FUNC
unpack_lzma_stream(int src_fd, int dst_fd)
{
	IF_DESKTOP(long long total_written = 0;)
	lzma_header_t header;
	int lc, pb, lp;          /* properties: literal-context, pos, literal-pos bits */
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p;             /* the single array holding all bit probabilities */
	int num_bits;
	int num_probs;
	rc_t *rc;
	int i;
	uint8_t *buffer;         /* sliding dictionary; doubles as the output buffer */
	uint8_t previous_byte = 0;
	size_t buffer_pos = 0, global_pos = 0; /* pos in dict; bytes already flushed */
	int len = 0;
	int state = 0;           /* state machine state, 0..LZMA_NUM_STATES-1 */
	uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1; /* last 4 match distances */

	/* Read and validate the 13-byte header; pos = lc + 9*(lp + 5*pb) < 225 */
	if (full_read(src_fd, &header, sizeof(header)) != sizeof(header)
	 || header.pos >= (9 * 5 * 5)
	) {
		bb_error_msg("bad lzma header");
		return -1;
	}

	/* Unpack the properties byte */
	i = header.pos / 9;
	lc = header.pos % 9;
	pb = i / 5;
	lp = i % 5;
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;

	header.dict_size = SWAP_LE32(header.dict_size);
	header.dst_size = SWAP_LE64(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size++;

	/* No point allocating a dictionary larger than the whole output */
	buffer = xmalloc(MIN(header.dst_size, header.dict_size));

	/* Allocate the probability array and start every probability at 1/2 */
	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = xmalloc(num_probs * sizeof(*p));
	num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;

	rc = rc_init(src_fd); /*, RC_BUFFER_SIZE); */

	while (global_pos + buffer_pos < header.dst_size) {
		int pos_state = (buffer_pos + global_pos) & pos_state_mask;
		uint16_t *prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;

		if (!rc_is_bit_1(rc, prob)) {
			/* ---- Literal byte ---- */
			static const char next_state[LZMA_NUM_STATES] =
				{ 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
			int mi = 1; /* tree index; becomes the decoded byte | 0x100 */

			/* Pick the literal coder from the position's low bits and
			 * the previous byte's high bits */
			prob = (p + LZMA_LITERAL
			        + (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
			                            + (previous_byte >> (8 - lc))
			                           )
			          )
			);

			if (state >= LZMA_NUM_LIT_STATES) {
				/* Right after a match: use the byte at distance rep0 as a
				 * predictor until a decoded bit disagrees with it */
				int match_byte;
				uint32_t pos = buffer_pos - rep0;

				/* pos underflowed (unsigned); adding dict_size wraps it
				 * back into [0, dict_size) */
				while (pos >= header.dict_size)
					pos += header.dict_size;
				match_byte = buffer[pos];
				do {
					int bit;

					match_byte <<= 1;
					bit = match_byte & 0x100;
					bit ^= (rc_get_bit(rc, prob + 0x100 + bit + mi, &mi) << 8); /* 0x100 or 0 */
					if (bit)
						break;
				} while (mi < 0x100);
			}
			/* Decode any remaining bits of the literal */
			while (mi < 0x100) {
				rc_get_bit(rc, prob + mi, &mi);
			}

			state = next_state[state];

			previous_byte = (uint8_t) mi;
#if ENABLE_FEATURE_LZMA_FAST
 one_byte1:
			/* Emit one byte; flush the dictionary to dst_fd when full */
			buffer[buffer_pos++] = previous_byte;
			if (buffer_pos == header.dict_size) {
				buffer_pos = 0;
				global_pos += header.dict_size;
				if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
					goto bad;
				IF_DESKTOP(total_written += header.dict_size;)
			}
#else
			/* size-optimized build reuses the match-copy loop for one byte */
			len = 1;
			goto one_byte2;
#endif
		} else {
			/* ---- Match (new or repeated) ---- */
			int offset;
			uint16_t *prob2;
#define prob_len prob2

			prob2 = p + LZMA_IS_REP + state;
			if (!rc_is_bit_1(rc, prob2)) {
				/* New match: rotate the distance history */
				rep3 = rep2;
				rep2 = rep1;
				rep1 = rep0;
				state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
				prob2 = p + LZMA_LEN_CODER;
			} else {
				/* Repeated match: select one of the last four distances */
				prob2 += LZMA_IS_REP_G0 - LZMA_IS_REP;
				if (!rc_is_bit_1(rc, prob2)) {
					prob2 = (p + LZMA_IS_REP_0_LONG
					        + (state << LZMA_NUM_POS_BITS_MAX)
					        + pos_state
					);
					if (!rc_is_bit_1(rc, prob2)) {
						/* "Short rep": a single byte at distance rep0 */
#if ENABLE_FEATURE_LZMA_FAST
						uint32_t pos = buffer_pos - rep0;
						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
						while (pos >= header.dict_size)
							pos += header.dict_size;
						previous_byte = buffer[pos];
						goto one_byte1;
#else
						state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
						len = 1;
						goto string;
#endif
					}
				} else {
					uint32_t distance;

					prob2 += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
					distance = rep1;
					if (rc_is_bit_1(rc, prob2)) {
						prob2 += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
						distance = rep2;
						if (rc_is_bit_1(rc, prob2)) {
							distance = rep3;
							rep3 = rep2;
						}
						rep2 = rep1;
					}
					rep1 = rep0;
					rep0 = distance;
				}
				state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
				prob2 = p + LZMA_REP_LEN_CODER;
			}

			/* Decode match length via low (3-bit), mid (3-bit) or
			 * high (8-bit) sub-coder; 'offset' biases between them */
			prob_len = prob2 + LZMA_LEN_CHOICE;
			num_bits = LZMA_LEN_NUM_LOW_BITS;
			if (!rc_is_bit_1(rc, prob_len)) {
				prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
				            + (pos_state << LZMA_LEN_NUM_LOW_BITS);
				offset = 0;
			} else {
				prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
				if (!rc_is_bit_1(rc, prob_len)) {
					prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
					            + (pos_state << LZMA_LEN_NUM_MID_BITS);
					offset = 1 << LZMA_LEN_NUM_LOW_BITS;
					num_bits += LZMA_LEN_NUM_MID_BITS - LZMA_LEN_NUM_LOW_BITS;
				} else {
					prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
					offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
					          + (1 << LZMA_LEN_NUM_MID_BITS));
					num_bits += LZMA_LEN_NUM_HIGH_BITS - LZMA_LEN_NUM_LOW_BITS;
				}
			}
			rc_bit_tree_decode(rc, prob_len, num_bits, &len);
			len += offset;

			if (state < 4) {
				/* New match: decode the distance (pos slot + extra bits) */
				int pos_slot;
				uint16_t *prob3;

				state += LZMA_NUM_LIT_STATES;
				prob3 = p + LZMA_POS_SLOT +
				       ((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
				         LZMA_NUM_LEN_TO_POS_STATES - 1)
				         << LZMA_NUM_POS_SLOT_BITS);
				rc_bit_tree_decode(rc, prob3,
					LZMA_NUM_POS_SLOT_BITS, &pos_slot);
				rep0 = pos_slot;
				if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
					int i2, mi2, num_bits2 = (pos_slot >> 1) - 1;
					rep0 = 2 | (pos_slot & 1);
					if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
						/* mid distances: all extra bits are model-coded */
						rep0 <<= num_bits2;
						prob3 = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
					} else {
						/* large distances: high extra bits are raw, the
						 * low LZMA_NUM_ALIGN_BITS are model-coded */
						for (; num_bits2 != LZMA_NUM_ALIGN_BITS; num_bits2--)
							rep0 = (rep0 << 1) | rc_direct_bit(rc);
						rep0 <<= LZMA_NUM_ALIGN_BITS;
						prob3 = p + LZMA_ALIGN;
					}
					/* decode the model-coded bits in reverse (LSB first) */
					i2 = 1;
					mi2 = 1;
					while (num_bits2--) {
						if (rc_get_bit(rc, prob3 + mi2, &mi2))
							rep0 |= i2;
						i2 <<= 1;
					}
				}
				/* distance 0xFFFFFFFF is the end-of-stream marker:
				 * ++rep0 wraps to 0 and we stop decoding */
				if (++rep0 == 0)
					break;
			}

			len += LZMA_MATCH_MIN_LEN;
 IF_NOT_FEATURE_LZMA_FAST(string:)
			/* Copy 'len' bytes from distance rep0, flushing the
			 * dictionary to dst_fd each time it fills */
			do {
				uint32_t pos = buffer_pos - rep0;
				while (pos >= header.dict_size)
					pos += header.dict_size;
				previous_byte = buffer[pos];
 IF_NOT_FEATURE_LZMA_FAST(one_byte2:)
				buffer[buffer_pos++] = previous_byte;
				if (buffer_pos == header.dict_size) {
					buffer_pos = 0;
					global_pos += header.dict_size;
					if (full_write(dst_fd, buffer, header.dict_size) != (ssize_t)header.dict_size)
						goto bad;
					IF_DESKTOP(total_written += header.dict_size;)
				}
				len--;
			} while (len != 0 && buffer_pos < header.dst_size);
		}
	}

	{
		/* Flush the final partial dictionary and release everything */
		IF_NOT_DESKTOP(int total_written = 0; /* success */)
		IF_DESKTOP(total_written += buffer_pos;)
		if (full_write(dst_fd, buffer, buffer_pos) != (ssize_t)buffer_pos) {
 bad:
			total_written = -1; /* failure */
		}
		rc_free(rc);
		free(p);
		free(buffer);
		return total_written;
	}
}
466