This source file includes following definitions.
- dict_reset
- dict_limit
- dict_has_space
- dict_get
- dict_put
- dict_repeat
- dict_uncompressed
- dict_flush
- rc_reset
- rc_read_init
- rc_limit_exceeded
- rc_is_finished
- rc_normalize
- rc_bit
- rc_bittree
- rc_bittree_reverse
- rc_direct
- lzma_literal_probs
- lzma_literal
- lzma_len
- lzma_match
- lzma_rep_match
- lzma_main
- lzma_reset
- lzma_props
- lzma2_lzma
- xz_dec_lzma2_run
- xz_dec_lzma2_create
- xz_dec_lzma2_reset
- xz_dec_lzma2_end
1
2
3
4
5
6
7
8
9
10
11 #include "xz_private.h"
12 #include "xz_lzma2.h"
13
14
15
16
17 #define RC_INIT_BYTES 5
18
19
20
21
22
23
24
25
26 #define LZMA_IN_REQUIRED 21
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
/* The history (sliding) dictionary plus output-buffering state. */
struct dictionary {
	/* Beginning of the history buffer */
	uint8_t *buf;

	/* Old position in buf (before decoding more data) */
	size_t start;

	/* Position in buf */
	size_t pos;

	/*
	 * How full the dictionary is. Used together with "size" in
	 * dict_repeat() to detect corrupt input that would reference
	 * data before the beginning of the uncompressed stream.
	 */
	size_t full;

	/* Write limit; bytes at buf[limit] and later are not written. */
	size_t limit;

	/*
	 * End of the dictionary buffer. In multi-call mode this equals
	 * the dictionary size; in single-call mode it is the size of
	 * the caller's output buffer (see dict_reset()).
	 */
	size_t end;

	/*
	 * Size of the dictionary as specified in the Block Header
	 * (decoded in xz_dec_lzma2_reset()).
	 */
	uint32_t size;

	/*
	 * Maximum allowed dictionary size in multi-call mode;
	 * ignored in single-call mode.
	 */
	uint32_t size_max;

	/*
	 * Amount of memory currently allocated for the dictionary.
	 * Used only with XZ_DYNALLOC mode.
	 */
	uint32_t allocated;

	/* Operation mode (single-call / prealloc / dynalloc) */
	enum xz_mode mode;
};
93
94
/* Range decoder state */
struct rc_dec {
	uint32_t range;
	uint32_t code;

	/*
	 * Number of initial bytes of the range coder state still to be
	 * read by rc_read_init() before decoding can start.
	 */
	uint32_t init_bytes_left;

	/*
	 * Buffer from which we read our input. It can be either
	 * temp.buf or the caller-provided input buffer
	 * (see lzma2_lzma()).
	 */
	const uint8_t *in;
	size_t in_pos;
	size_t in_limit;
};
113
114
/* Probabilities for decoding a match length */
struct lzma_len_dec {
	/* Probability of the length being coded in the "low" tree */
	uint16_t choice;

	/* If not "low": probability of "mid" tree vs. "high" tree */
	uint16_t choice2;

	/* Probabilities for the shortest lengths */
	uint16_t low[POS_STATES_MAX][LEN_LOW_SYMBOLS];

	/* Probabilities for the middle range of lengths */
	uint16_t mid[POS_STATES_MAX][LEN_MID_SYMBOLS];

	/* Probabilities for the longest lengths */
	uint16_t high[LEN_HIGH_SYMBOLS];
};
131
/* LZMA decoder state: match distances, state machine, and probabilities */
struct lzma_dec {
	/* Distances of the latest four matches */
	uint32_t rep0;
	uint32_t rep1;
	uint32_t rep2;
	uint32_t rep3;

	/* Types of the most recently seen LZMA symbols */
	enum lzma_state state;

	/*
	 * Length of a match. Updated by dict_repeat() so that a match
	 * split across calls can be resumed (see lzma_main()).
	 */
	uint32_t len;

	/*
	 * LZMA properties: number of literal context bits (lc) and
	 * bit masks derived from the literal position bits and the
	 * position bits (see lzma_props()).
	 */
	uint32_t lc;
	uint32_t literal_pos_mask;
	uint32_t pos_mask;

	/* If 1, it's a match. Otherwise it's a single 8-bit literal. */
	uint16_t is_match[STATES][POS_STATES_MAX];

	/* If 1, it's a repeated match; distance is one of rep0..rep3. */
	uint16_t is_rep[STATES];

	/*
	 * If 0, distance of a repeated match is rep0.
	 * Otherwise check is_rep1.
	 */
	uint16_t is_rep0[STATES];

	/*
	 * If 0, distance of a repeated match is rep1.
	 * Otherwise check is_rep2.
	 */
	uint16_t is_rep1[STATES];

	/* If 0, distance of a repeated match is rep2. Otherwise rep3. */
	uint16_t is_rep2[STATES];

	/*
	 * If 1, the repeated match has length of one byte. Otherwise
	 * the length is decoded from rep_len_dec.
	 */
	uint16_t is_rep0_long[STATES][POS_STATES_MAX];

	/*
	 * Probability tree for the distance slot (the high bits of the
	 * match distance); one tree per distance state derived from the
	 * match length (see lzma_get_dist_state()).
	 */
	uint16_t dist_slot[DIST_STATES][DIST_SLOTS];

	/*
	 * Probability trees for the additional distance bits of
	 * mid-range distances (slots DIST_MODEL_START..DIST_MODEL_END).
	 */
	uint16_t dist_special[FULL_DISTANCES - DIST_MODEL_END];

	/*
	 * Probability tree for the lowest ALIGN_BITS bits of large
	 * match distances (see lzma_match()).
	 */
	uint16_t dist_align[ALIGN_SIZE];

	/* Length decoder for normal matches */
	struct lzma_len_dec match_len_dec;

	/* Length decoder for repeated matches */
	struct lzma_len_dec rep_len_dec;

	/* Probabilities of literals */
	uint16_t literal[LITERAL_CODERS_MAX][LITERAL_CODER_SIZE];
};
213
/* LZMA2 chunk-layer state machine */
struct lzma2_dec {
	/* Position in the LZMA2 chunk parsing state machine */
	enum lzma2_seq {
		SEQ_CONTROL,
		SEQ_UNCOMPRESSED_1,
		SEQ_UNCOMPRESSED_2,
		SEQ_COMPRESSED_0,
		SEQ_COMPRESSED_1,
		SEQ_PROPERTIES,
		SEQ_LZMA_PREPARE,
		SEQ_LZMA_RUN,
		SEQ_COPY
	} sequence;

	/* Next state after decoding the compressed size of the chunk */
	enum lzma2_seq next_sequence;

	/*
	 * Remaining uncompressed size of the current chunk
	 * (built from the control byte and two size bytes).
	 */
	uint32_t uncompressed;

	/*
	 * Remaining compressed size of an LZMA chunk, or the remaining
	 * size of an uncompressed chunk (decremented in SEQ_COPY).
	 */
	uint32_t compressed;

	/*
	 * True if a dictionary reset is required before the next chunk
	 * may be decoded; set at stream start by xz_dec_lzma2_reset().
	 */
	bool need_dict_reset;

	/*
	 * True if new LZMA properties must be received before an LZMA
	 * chunk may be decoded (enforced in SEQ_CONTROL handling).
	 */
	bool need_props;
};
252
/* Complete LZMA2 decoder state */
struct xz_dec_lzma2 {
	/*
	 * NOTE(review): the field order here presumably matters for code
	 * size / data-cache behavior on some targets — confirm against
	 * upstream before reordering.
	 */
	struct rc_dec rc;
	struct dictionary dict;
	struct lzma2_dec lzma2;
	struct lzma_dec lzma;

	/*
	 * Temporary buffer holding a small number of input bytes between
	 * decoder calls so the range decoder never reads past the end of
	 * the caller's input buffer. See lzma2_lzma() for details.
	 */
	struct {
		/* Number of bytes currently buffered in buf */
		uint32_t size;
		uint8_t buf[3 * LZMA_IN_REQUIRED];
	} temp;
};
277
278
279
280
281
282
283
284
285
286 static void dict_reset(struct dictionary *dict, struct xz_buf *b)
287 {
288 if (DEC_IS_SINGLE(dict->mode)) {
289 dict->buf = b->out + b->out_pos;
290 dict->end = b->out_size - b->out_pos;
291 }
292
293 dict->start = 0;
294 dict->pos = 0;
295 dict->limit = 0;
296 dict->full = 0;
297 }
298
299
300 static void dict_limit(struct dictionary *dict, size_t out_max)
301 {
302 if (dict->end - dict->pos <= out_max)
303 dict->limit = dict->end;
304 else
305 dict->limit = dict->pos + out_max;
306 }
307
308
309 static inline bool dict_has_space(const struct dictionary *dict)
310 {
311 return dict->pos < dict->limit;
312 }
313
314
315
316
317
318
319
320 static inline uint32_t dict_get(const struct dictionary *dict, uint32_t dist)
321 {
322 size_t offset = dict->pos - dist - 1;
323
324 if (dist >= dict->pos)
325 offset += dict->end;
326
327 return dict->full > 0 ? dict->buf[offset] : 0;
328 }
329
330
331
332
333 static inline void dict_put(struct dictionary *dict, uint8_t byte)
334 {
335 dict->buf[dict->pos++] = byte;
336
337 if (dict->full < dict->pos)
338 dict->full = dict->pos;
339 }
340
341
342
343
344
345
/*
 * Repeat *len bytes from distance dist back in the dictionary (LZMA match
 * copy). Returns false if the distance is invalid — i.e. it would read
 * before the beginning of the uncompressed stream or past the declared
 * dictionary size — which indicates corrupt input. At most up to
 * dict->limit bytes are copied in one call; the remaining length is left
 * in *len so lzma_main() can resume the copy later.
 */
static bool dict_repeat(struct dictionary *dict, uint32_t *len, uint32_t dist)
{
	size_t back;
	uint32_t left;

	if (dist >= dict->full || dist >= dict->size)
		return false;

	/* Clamp the copy to the space left below the write limit. */
	left = min_t(size_t, dict->limit - dict->pos, *len);
	*len -= left;

	back = dict->pos - dist - 1;
	if (dist >= dict->pos)
		back += dict->end; /* wrap to the end of the circular buffer */

	/* Byte-by-byte copy: source and destination may overlap. */
	do {
		dict->buf[dict->pos++] = dict->buf[back++];
		if (back == dict->end)
			back = 0;
	} while (--left > 0);

	if (dict->full < dict->pos)
		dict->full = dict->pos;

	return true;
}
372
373
/*
 * Copy uncompressed data as-is from the input to the dictionary and to the
 * output buffer. *left (remaining chunk size) is decremented by the number
 * of bytes copied. Stops when the chunk ends or input/output is exhausted.
 */
static void dict_uncompressed(struct dictionary *dict, struct xz_buf *b,
		uint32_t *left)
{
	size_t copy_size;

	while (*left > 0 && b->in_pos < b->in_size
			&& b->out_pos < b->out_size) {
		/* Bounded by input left, output space, dictionary space,
		 * and the remaining chunk size. */
		copy_size = min(b->in_size - b->in_pos,
				b->out_size - b->out_pos);
		if (copy_size > dict->end - dict->pos)
			copy_size = dict->end - dict->pos;
		if (copy_size > *left)
			copy_size = *left;

		*left -= copy_size;

		memcpy(dict->buf + dict->pos, b->in + b->in_pos, copy_size);
		dict->pos += copy_size;

		if (dict->full < dict->pos)
			dict->full = dict->pos;

		if (DEC_IS_MULTI(dict->mode)) {
			/* Wrap the circular dictionary if it got full. */
			if (dict->pos == dict->end)
				dict->pos = 0;

			/*
			 * In multi-call mode the output buffer is separate
			 * from the dictionary, so copy there too.
			 */
			memcpy(b->out + b->out_pos, b->in + b->in_pos,
					copy_size);
		}

		dict->start = dict->pos;

		b->out_pos += copy_size;
		b->in_pos += copy_size;
	}
}
410
411
412
413
414
415
416 static uint32_t dict_flush(struct dictionary *dict, struct xz_buf *b)
417 {
418 size_t copy_size = dict->pos - dict->start;
419
420 if (DEC_IS_MULTI(dict->mode)) {
421 if (dict->pos == dict->end)
422 dict->pos = 0;
423
424 memcpy(b->out + b->out_pos, dict->buf + dict->start,
425 copy_size);
426 }
427
428 dict->start = dict->pos;
429 b->out_pos += copy_size;
430 return copy_size;
431 }
432
433
434
435
436
437
438 static void rc_reset(struct rc_dec *rc)
439 {
440 rc->range = (uint32_t)-1;
441 rc->code = 0;
442 rc->init_bytes_left = RC_INIT_BYTES;
443 }
444
445
446
447
448
449 static bool rc_read_init(struct rc_dec *rc, struct xz_buf *b)
450 {
451 while (rc->init_bytes_left > 0) {
452 if (b->in_pos == b->in_size)
453 return false;
454
455 rc->code = (rc->code << 8) + b->in[b->in_pos++];
456 --rc->init_bytes_left;
457 }
458
459 return true;
460 }
461
462
463 static inline bool rc_limit_exceeded(const struct rc_dec *rc)
464 {
465 return rc->in_pos > rc->in_limit;
466 }
467
468
469
470
471
472 static inline bool rc_is_finished(const struct rc_dec *rc)
473 {
474 return rc->code == 0;
475 }
476
477
478 static __always_inline void rc_normalize(struct rc_dec *rc)
479 {
480 if (rc->range < RC_TOP_VALUE) {
481 rc->range <<= RC_SHIFT_BITS;
482 rc->code = (rc->code << RC_SHIFT_BITS) + rc->in[rc->in_pos++];
483 }
484 }
485
486
487
488
489
490
491
492
493
494
495
496
497 static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
498 {
499 uint32_t bound;
500 int bit;
501
502 rc_normalize(rc);
503 bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob;
504 if (rc->code < bound) {
505 rc->range = bound;
506 *prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS;
507 bit = 0;
508 } else {
509 rc->range -= bound;
510 rc->code -= bound;
511 *prob -= *prob >> RC_MOVE_BITS;
512 bit = 1;
513 }
514
515 return bit;
516 }
517
518
519 static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
520 uint16_t *probs, uint32_t limit)
521 {
522 uint32_t symbol = 1;
523
524 do {
525 if (rc_bit(rc, &probs[symbol]))
526 symbol = (symbol << 1) + 1;
527 else
528 symbol <<= 1;
529 } while (symbol < limit);
530
531 return symbol;
532 }
533
534
535 static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
536 uint16_t *probs,
537 uint32_t *dest, uint32_t limit)
538 {
539 uint32_t symbol = 1;
540 uint32_t i = 0;
541
542 do {
543 if (rc_bit(rc, &probs[symbol])) {
544 symbol = (symbol << 1) + 1;
545 *dest += 1 << i;
546 } else {
547 symbol <<= 1;
548 }
549 } while (++i < limit);
550 }
551
552
553 static inline void rc_direct(struct rc_dec *rc, uint32_t *dest, uint32_t limit)
554 {
555 uint32_t mask;
556
557 do {
558 rc_normalize(rc);
559 rc->range >>= 1;
560 rc->code -= rc->range;
561 mask = (uint32_t)0 - (rc->code >> 31);
562 rc->code += rc->range & mask;
563 *dest = (*dest << 1) + (mask + 1);
564 } while (--limit > 0);
565 }
566
567
568
569
570
571
572 static uint16_t *lzma_literal_probs(struct xz_dec_lzma2 *s)
573 {
574 uint32_t prev_byte = dict_get(&s->dict, 0);
575 uint32_t low = prev_byte >> (8 - s->lzma.lc);
576 uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc;
577 return s->lzma.literal[low + high];
578 }
579
580
/* Decode a literal (one 8-bit byte) */
static void lzma_literal(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	uint32_t symbol;
	uint32_t match_byte;
	uint32_t match_bit;
	uint32_t offset;
	uint32_t i;

	probs = lzma_literal_probs(s);

	if (lzma_state_is_literal(s->lzma.state)) {
		/* After a literal: plain 8-bit bittree decode. */
		symbol = rc_bittree(&s->rc, probs, 0x100);
	} else {
		/*
		 * After a match: use the byte at distance rep0 ("match
		 * byte") to select probabilities, as long as the decoded
		 * bits keep agreeing with the match byte's bits. Once
		 * they diverge, offset collapses and decoding continues
		 * with plain probabilities.
		 */
		symbol = 1;
		match_byte = dict_get(&s->dict, s->lzma.rep0) << 1;
		offset = 0x100;

		do {
			match_bit = match_byte & offset;
			match_byte <<= 1;
			i = offset + match_bit + symbol;

			if (rc_bit(&s->rc, &probs[i])) {
				symbol = (symbol << 1) + 1;
				offset &= match_bit;
			} else {
				symbol <<= 1;
				offset &= ~match_bit;
			}
		} while (symbol < 0x100);
	}

	dict_put(&s->dict, (uint8_t)symbol);
	lzma_state_literal(&s->lzma.state);
}
617
618
619 static void lzma_len(struct xz_dec_lzma2 *s, struct lzma_len_dec *l,
620 uint32_t pos_state)
621 {
622 uint16_t *probs;
623 uint32_t limit;
624
625 if (!rc_bit(&s->rc, &l->choice)) {
626 probs = l->low[pos_state];
627 limit = LEN_LOW_SYMBOLS;
628 s->lzma.len = MATCH_LEN_MIN;
629 } else {
630 if (!rc_bit(&s->rc, &l->choice2)) {
631 probs = l->mid[pos_state];
632 limit = LEN_MID_SYMBOLS;
633 s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS;
634 } else {
635 probs = l->high;
636 limit = LEN_HIGH_SYMBOLS;
637 s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS
638 + LEN_MID_SYMBOLS;
639 }
640 }
641
642 s->lzma.len += rc_bittree(&s->rc, probs, limit) - limit;
643 }
644
645
/* Decode a match. The distance will be stored in s->lzma.rep0. */
static void lzma_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t dist_slot;
	uint32_t limit;

	lzma_state_match(&s->lzma.state);

	/* Rotate the distance history; rep0 will get the new distance. */
	s->lzma.rep3 = s->lzma.rep2;
	s->lzma.rep2 = s->lzma.rep1;
	s->lzma.rep1 = s->lzma.rep0;

	lzma_len(s, &s->lzma.match_len_dec, pos_state);

	probs = s->lzma.dist_slot[lzma_get_dist_state(s->lzma.len)];
	dist_slot = rc_bittree(&s->rc, probs, DIST_SLOTS) - DIST_SLOTS;

	if (dist_slot < DIST_MODEL_START) {
		/* Small distances are the slot value itself. */
		s->lzma.rep0 = dist_slot;
	} else {
		/* The slot encodes the top two bits plus the bit count. */
		limit = (dist_slot >> 1) - 1;
		s->lzma.rep0 = 2 + (dist_slot & 1);

		if (dist_slot < DIST_MODEL_END) {
			/* Mid-range: extra bits decoded with probabilities. */
			s->lzma.rep0 <<= limit;
			probs = s->lzma.dist_special + s->lzma.rep0
					- dist_slot - 1;
			rc_bittree_reverse(&s->rc, probs,
					&s->lzma.rep0, limit);
		} else {
			/*
			 * Large distance: high bits are direct bits, the
			 * lowest ALIGN_BITS use the dist_align tree.
			 */
			rc_direct(&s->rc, &s->lzma.rep0, limit - ALIGN_BITS);
			s->lzma.rep0 <<= ALIGN_BITS;
			rc_bittree_reverse(&s->rc, s->lzma.dist_align,
					&s->lzma.rep0, ALIGN_BITS);
		}
	}
}
683
684
685
686
687
/*
 * Decode a repeated match. The distance is one of the four most recently
 * seen matches; the chosen distance is rotated into s->lzma.rep0.
 */
static void lzma_rep_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint32_t tmp;

	if (!rc_bit(&s->rc, &s->lzma.is_rep0[s->lzma.state])) {
		if (!rc_bit(&s->rc, &s->lzma.is_rep0_long[
				s->lzma.state][pos_state])) {
			/* "Short rep": distance rep0, implicit length 1. */
			lzma_state_short_rep(&s->lzma.state);
			s->lzma.len = 1;
			return;
		}
	} else {
		/* Select rep1, rep2, or rep3 and rotate it to the front. */
		if (!rc_bit(&s->rc, &s->lzma.is_rep1[s->lzma.state])) {
			tmp = s->lzma.rep1;
		} else {
			if (!rc_bit(&s->rc, &s->lzma.is_rep2[s->lzma.state])) {
				tmp = s->lzma.rep2;
			} else {
				tmp = s->lzma.rep3;
				s->lzma.rep3 = s->lzma.rep2;
			}

			s->lzma.rep2 = s->lzma.rep1;
		}

		s->lzma.rep1 = s->lzma.rep0;
		s->lzma.rep0 = tmp;
	}

	lzma_state_long_rep(&s->lzma.state);
	lzma_len(s, &s->lzma.rep_len_dec, pos_state);
}
720
721
/* LZMA decoder core */
static bool lzma_main(struct xz_dec_lzma2 *s)
{
	uint32_t pos_state;

	/*
	 * If the dictionary limit was reached during the previous call,
	 * try to finish the pending match repeat first.
	 */
	if (dict_has_space(&s->dict) && s->lzma.len > 0)
		dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0);

	/*
	 * Decode more LZMA symbols while there is dictionary space and
	 * the input limit hasn't been exceeded.
	 */
	while (dict_has_space(&s->dict) && !rc_limit_exceeded(&s->rc)) {
		pos_state = s->dict.pos & s->lzma.pos_mask;

		if (!rc_bit(&s->rc, &s->lzma.is_match[
				s->lzma.state][pos_state])) {
			lzma_literal(s);
		} else {
			/* Match: repeated (old distance) or a new one. */
			if (rc_bit(&s->rc, &s->lzma.is_rep[s->lzma.state]))
				lzma_rep_match(s, pos_state);
			else
				lzma_match(s, pos_state);

			/* Invalid distance means corrupt input. */
			if (!dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0))
				return false;
		}
	}

	/*
	 * Having the range decoder always normalized when we are outside
	 * this function makes it easier to correctly handle the end of
	 * the chunk.
	 */
	rc_normalize(&s->rc);

	return true;
}
762
763
764
765
766
/* Reset the LZMA decoder and range decoder state */
static void lzma_reset(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	size_t i;

	s->lzma.state = STATE_LIT_LIT;
	s->lzma.rep0 = 0;
	s->lzma.rep1 = 0;
	s->lzma.rep2 = 0;
	s->lzma.rep3 = 0;

	/*
	 * All probabilities are initialized to the same value, so they are
	 * filled with a single loop that treats every probability array in
	 * struct lzma_dec, starting at is_match, as one contiguous uint16_t
	 * array of PROBS_TOTAL elements.
	 *
	 * NOTE(review): this relies on the members from is_match onward
	 * being laid out contiguously with no padding — keep struct
	 * lzma_dec's field order in sync with PROBS_TOTAL.
	 */
	probs = s->lzma.is_match[0];
	for (i = 0; i < PROBS_TOTAL; ++i)
		probs[i] = RC_BIT_MODEL_TOTAL / 2;

	rc_reset(&s->rc);
}
793
794
795
796
797
798
799 static bool lzma_props(struct xz_dec_lzma2 *s, uint8_t props)
800 {
801 if (props > (4 * 5 + 4) * 9 + 8)
802 return false;
803
804 s->lzma.pos_mask = 0;
805 while (props >= 9 * 5) {
806 props -= 9 * 5;
807 ++s->lzma.pos_mask;
808 }
809
810 s->lzma.pos_mask = (1 << s->lzma.pos_mask) - 1;
811
812 s->lzma.literal_pos_mask = 0;
813 while (props >= 9) {
814 props -= 9;
815 ++s->lzma.literal_pos_mask;
816 }
817
818 s->lzma.lc = props;
819
820 if (s->lzma.lc + s->lzma.literal_pos_mask > 4)
821 return false;
822
823 s->lzma.literal_pos_mask = (1 << s->lzma.literal_pos_mask) - 1;
824
825 lzma_reset(s);
826
827 return true;
828 }
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
/*
 * The LZMA decoder assumes that if the input limit (s->rc.in_limit) hasn't
 * been exceeded, it is safe to read up to LZMA_IN_REQUIRED bytes. This
 * wrapper takes care of making that assumption safe near buffer edges:
 * while plenty of input remains in the current chunk we decode directly
 * from the caller's buffer; the last < LZMA_IN_REQUIRED bytes are staged
 * in s->temp.buf and completed on the next call.
 */
static bool lzma2_lzma(struct xz_dec_lzma2 *s, struct xz_buf *b)
{
	size_t in_avail;
	uint32_t tmp;

	in_avail = b->in_size - b->in_pos;
	if (s->temp.size > 0 || s->lzma2.compressed == 0) {
		/* Top up temp.buf, bounded by its capacity target, the
		 * remaining compressed size, and the available input. */
		tmp = 2 * LZMA_IN_REQUIRED - s->temp.size;
		if (tmp > s->lzma2.compressed - s->temp.size)
			tmp = s->lzma2.compressed - s->temp.size;
		if (tmp > in_avail)
			tmp = in_avail;

		memcpy(s->temp.buf + s->temp.size, b->in + b->in_pos, tmp);

		if (s->temp.size + tmp == s->lzma2.compressed) {
			/* Whole chunk fits in temp.buf: zero the tail so the
			 * decoder can safely over-read, decode to the end. */
			memzero(s->temp.buf + s->temp.size + tmp,
					sizeof(s->temp.buf)
						- s->temp.size - tmp);
			s->rc.in_limit = s->temp.size + tmp;
		} else if (s->temp.size + tmp < LZMA_IN_REQUIRED) {
			/* Still not enough buffered to decode one symbol;
			 * stash the bytes and wait for more input. */
			s->temp.size += tmp;
			b->in_pos += tmp;
			return true;
		} else {
			s->rc.in_limit = s->temp.size + tmp - LZMA_IN_REQUIRED;
		}

		s->rc.in = s->temp.buf;
		s->rc.in_pos = 0;

		if (!lzma_main(s) || s->rc.in_pos > s->temp.size + tmp)
			return false;

		s->lzma2.compressed -= s->rc.in_pos;

		if (s->rc.in_pos < s->temp.size) {
			/* Decoder stopped inside the previously buffered
			 * bytes; keep the unconsumed part of temp.buf. */
			s->temp.size -= s->rc.in_pos;
			memmove(s->temp.buf, s->temp.buf + s->rc.in_pos,
					s->temp.size);
			return true;
		}

		/* temp.buf fully consumed; account the caller-buffer part. */
		b->in_pos += s->rc.in_pos - s->temp.size;
		s->temp.size = 0;
	}

	in_avail = b->in_size - b->in_pos;
	if (in_avail >= LZMA_IN_REQUIRED) {
		/* Decode directly from the caller's input buffer. */
		s->rc.in = b->in;
		s->rc.in_pos = b->in_pos;

		if (in_avail >= s->lzma2.compressed + LZMA_IN_REQUIRED)
			s->rc.in_limit = b->in_pos + s->lzma2.compressed;
		else
			s->rc.in_limit = b->in_size - LZMA_IN_REQUIRED;

		if (!lzma_main(s))
			return false;

		in_avail = s->rc.in_pos - b->in_pos;
		if (in_avail > s->lzma2.compressed)
			return false;

		s->lzma2.compressed -= in_avail;
		b->in_pos = s->rc.in_pos;
	}

	/* Stage any short tail of the input into temp.buf for next time. */
	in_avail = b->in_size - b->in_pos;
	if (in_avail < LZMA_IN_REQUIRED) {
		if (in_avail > s->lzma2.compressed)
			in_avail = s->lzma2.compressed;

		memcpy(s->temp.buf, b->in + b->in_pos, in_avail);
		s->temp.size = in_avail;
		b->in_pos += in_avail;
	}

	return true;
}
926
927
928
929
930
/*
 * Take care of the LZMA2 control layer, and forward the job of actual
 * LZMA decoding or copying of uncompressed chunks to other functions.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_run(struct xz_dec_lzma2 *s,
		struct xz_buf *b)
{
	uint32_t tmp;

	while (b->in_pos < b->in_size || s->lzma2.sequence == SEQ_LZMA_RUN) {
		switch (s->lzma2.sequence) {
		case SEQ_CONTROL:
			/*
			 * LZMA2 control byte
			 *
			 * Exact values:
			 *   0x00   End marker
			 *   0x01   Dictionary reset followed by
			 *          an uncompressed chunk
			 *   0x02   Uncompressed chunk (no dictionary reset)
			 *
			 * Highest three bits (tmp & 0xE0):
			 *   0xE0   Dictionary reset, new properties and state
			 *          reset, followed by LZMA compressed chunk
			 *   0xC0   New properties and state reset, followed
			 *          by LZMA compressed chunk (no dictionary
			 *          reset)
			 *   0xA0   State reset using old properties,
			 *          followed by LZMA compressed chunk (no
			 *          dictionary reset)
			 *   0x80   LZMA chunk (no dictionary or state reset)
			 *
			 * For LZMA compressed chunks, the lowest five bits
			 * (tmp & 0x1F) are the highest bits of the
			 * uncompressed size (bits 16-20).
			 *
			 * A new LZMA2 stream must begin with a dictionary
			 * reset. The first LZMA chunk must set new
			 * properties and reset the LZMA state. Values that
			 * don't match anything described above are invalid
			 * and we return XZ_DATA_ERROR.
			 */
			tmp = b->in[b->in_pos++];

			if (tmp == 0x00)
				return XZ_STREAM_END;

			if (tmp >= 0xE0 || tmp == 0x01) {
				s->lzma2.need_props = true;
				s->lzma2.need_dict_reset = false;
				dict_reset(&s->dict, b);
			} else if (s->lzma2.need_dict_reset) {
				return XZ_DATA_ERROR;
			}

			if (tmp >= 0x80) {
				s->lzma2.uncompressed = (tmp & 0x1F) << 16;
				s->lzma2.sequence = SEQ_UNCOMPRESSED_1;

				if (tmp >= 0xC0) {
					/*
					 * When there are new properties,
					 * the state reset is done in
					 * SEQ_PROPERTIES.
					 */
					s->lzma2.need_props = false;
					s->lzma2.next_sequence
							= SEQ_PROPERTIES;

				} else if (s->lzma2.need_props) {
					return XZ_DATA_ERROR;

				} else {
					s->lzma2.next_sequence
							= SEQ_LZMA_PREPARE;
					if (tmp >= 0xA0)
						lzma_reset(s);
				}
			} else {
				if (tmp > 0x02)
					return XZ_DATA_ERROR;

				s->lzma2.sequence = SEQ_COMPRESSED_0;
				s->lzma2.next_sequence = SEQ_COPY;
			}

			break;

		case SEQ_UNCOMPRESSED_1:
			/* Middle byte (bits 8-15) of the uncompressed size */
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_UNCOMPRESSED_2;
			break;

		case SEQ_UNCOMPRESSED_2:
			/* Low byte; the stored size is size - 1. */
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = SEQ_COMPRESSED_0;
			break;

		case SEQ_COMPRESSED_0:
			/* High byte of the compressed size */
			s->lzma2.compressed
					= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_COMPRESSED_1;
			break;

		case SEQ_COMPRESSED_1:
			/* Low byte; the stored size is size - 1. */
			s->lzma2.compressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = s->lzma2.next_sequence;
			break;

		case SEQ_PROPERTIES:
			if (!lzma_props(s, b->in[b->in_pos++]))
				return XZ_DATA_ERROR;

			s->lzma2.sequence = SEQ_LZMA_PREPARE;

			/* Fall through */

		case SEQ_LZMA_PREPARE:
			if (s->lzma2.compressed < RC_INIT_BYTES)
				return XZ_DATA_ERROR;

			if (!rc_read_init(&s->rc, b))
				return XZ_OK;

			s->lzma2.compressed -= RC_INIT_BYTES;
			s->lzma2.sequence = SEQ_LZMA_RUN;

			/* Fall through */

		case SEQ_LZMA_RUN:
			/*
			 * Set the dictionary limit to indicate how much we
			 * want decoded at maximum, decode new data into the
			 * dictionary, then flush it to b->out. If the
			 * dictionary got full but the output buffer isn't
			 * full yet, this case may run multiple times
			 * without changing s->lzma2.sequence.
			 */
			dict_limit(&s->dict, min_t(size_t,
					b->out_size - b->out_pos,
					s->lzma2.uncompressed));
			if (!lzma2_lzma(s, b))
				return XZ_DATA_ERROR;

			s->lzma2.uncompressed -= dict_flush(&s->dict, b);

			if (s->lzma2.uncompressed == 0) {
				/*
				 * At the end of the chunk all compressed
				 * input must be consumed, no match may be
				 * pending, and the range coder must be in
				 * its finished state.
				 */
				if (s->lzma2.compressed > 0 || s->lzma.len > 0
						|| !rc_is_finished(&s->rc))
					return XZ_DATA_ERROR;

				rc_reset(&s->rc);
				s->lzma2.sequence = SEQ_CONTROL;

			} else if (b->out_pos == b->out_size
					|| (b->in_pos == b->in_size
						&& s->temp.size
						< s->lzma2.compressed)) {
				/* Out of output space or out of input. */
				return XZ_OK;
			}

			break;

		case SEQ_COPY:
			dict_uncompressed(&s->dict, b, &s->lzma2.compressed);
			if (s->lzma2.compressed > 0)
				return XZ_OK;

			s->lzma2.sequence = SEQ_CONTROL;
			break;
		}
	}

	return XZ_OK;
}
1107
1108 XZ_EXTERN struct xz_dec_lzma2 *xz_dec_lzma2_create(enum xz_mode mode,
1109 uint32_t dict_max)
1110 {
1111 struct xz_dec_lzma2 *s = kmalloc(sizeof(*s), GFP_KERNEL);
1112 if (s == NULL)
1113 return NULL;
1114
1115 s->dict.mode = mode;
1116 s->dict.size_max = dict_max;
1117
1118 if (DEC_IS_PREALLOC(mode)) {
1119 s->dict.buf = vmalloc(dict_max);
1120 if (s->dict.buf == NULL) {
1121 kfree(s);
1122 return NULL;
1123 }
1124 } else if (DEC_IS_DYNALLOC(mode)) {
1125 s->dict.buf = NULL;
1126 s->dict.allocated = 0;
1127 }
1128
1129 return s;
1130 }
1131
/*
 * Decode the dictionary-size properties byte from the Block Header and
 * prepare the decoder for a new LZMA2 stream. In XZ_DYNALLOC mode the
 * dictionary is (re)allocated here if the current allocation is too small.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_reset(struct xz_dec_lzma2 *s, uint8_t props)
{
	/* Values above 39 would mean a dictionary of 4 GiB or more. */
	if (props > 39)
		return XZ_OPTIONS_ERROR;

	/* dict size = (2 | (props & 1)) << (props / 2 + 11) */
	s->dict.size = 2 + (props & 1);
	s->dict.size <<= (props >> 1) + 11;

	if (DEC_IS_MULTI(s->dict.mode)) {
		if (s->dict.size > s->dict.size_max)
			return XZ_MEMLIMIT_ERROR;

		s->dict.end = s->dict.size;

		if (DEC_IS_DYNALLOC(s->dict.mode)) {
			if (s->dict.allocated < s->dict.size) {
				s->dict.allocated = s->dict.size;
				vfree(s->dict.buf);
				s->dict.buf = vmalloc(s->dict.size);
				if (s->dict.buf == NULL) {
					/* Old buffer was already freed. */
					s->dict.allocated = 0;
					return XZ_MEM_ERROR;
				}
			}
		}
	}

	s->lzma.len = 0;

	s->lzma2.sequence = SEQ_CONTROL;
	/* The first chunk must perform a dictionary reset. */
	s->lzma2.need_dict_reset = true;

	s->temp.size = 0;

	return XZ_OK;
}
1169
1170 XZ_EXTERN void xz_dec_lzma2_end(struct xz_dec_lzma2 *s)
1171 {
1172 if (DEC_IS_MULTI(s->dict.mode))
1173 vfree(s->dict.buf);
1174
1175 kfree(s);
1176 }