decompress_unxz: update to a newer version which can unpack SHA-256-protected files
function                                             old     new   delta
check_sizes                                            -      16     +16
crc32_table                                            -       4      +4
index_update                                          47      40      -7
crc32_validate                                       110      93     -17
dec_vli                                              197     165     -32
unpack_xz_stream                                    4284    4014    -270
------------------------------------------------------------------------------
(add/remove: 2/0 grow/shrink: 0/4 up/down: 20/-326)           Total: -306 bytes
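
The key new knob is XZ_DEC_ANY_CHECK: instead of refusing a stream whose
integrity check it cannot verify (such as SHA-256 when only CRC32 support
is compiled in), xz_dec_run() reports XZ_UNSUPPORTED_CHECK once and keeps
decoding. A minimal caller-side sketch, not the busybox code: fill_in()
and flush_out() are hypothetical I/O helpers, the rest is the xz-embedded
API from xz.h:

	#include "xz.h"

	/* Hypothetical I/O helpers: refill b->in / drain b->out,
	 * returning 0 on failure. Not part of the xz-embedded API. */
	static int fill_in(struct xz_buf *b);
	static int flush_out(struct xz_buf *b);

	static int decode_loop(struct xz_dec *s, struct xz_buf *b)
	{
		for (;;) {
			enum xz_ret r = xz_dec_run(s, b);
			if (r == XZ_UNSUPPORTED_CHECK)
				continue; /* e.g. SHA-256: skip, don't fail */
			if (r == XZ_STREAM_END)
				return 0;
			if (r != XZ_OK)
				return -1; /* corrupted data */
			if (b->in_pos == b->in_size && !fill_in(b))
				return -1;
			if (b->out_pos == b->out_size && !flush_out(b))
				return -1;
		}
	}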
Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
diff --git a/archival/libunarchive/decompress_unxz.c b/archival/libunarchive/decompress_unxz.c
index 924a525..374b76d 100644
--- a/archival/libunarchive/decompress_unxz.c
+++ b/archival/libunarchive/decompress_unxz.c
@@ -16,9 +16,13 @@
#define XZ_FUNC FAST_FUNC
#define XZ_EXTERN static
-#define xz_crc32_init(table) crc32_filltable(table, /*endian:*/ 0)
-static uint32_t xz_crc32(uint32_t *crc32_table,
- const uint8_t *buf, size_t size, uint32_t crc)
+/* Skip check (rather than fail) of unsupported hash functions */
+#define XZ_DEC_ANY_CHECK 1
+
+/* We use our own crc32 function */
+#define XZ_INTERNAL_CRC32 0
+static uint32_t *crc32_table;
+static uint32_t xz_crc32(const uint8_t *buf, size_t size, uint32_t crc)
{
crc = ~crc;
@@ -29,8 +33,8 @@
return ~crc;
}
-#define xz_crc32 xz_crc32
+/* We use arch-optimized unaligned accessors */
#define get_unaligned_le32(buf) ({ uint32_t v; move_from_unaligned32(v, buf); SWAP_LE32(v); })
#define get_unaligned_be32(buf) ({ uint32_t v; move_from_unaligned32(v, buf); SWAP_BE32(v); })
#define put_unaligned_le32(val, buf) move_to_unaligned32(buf, SWAP_LE32(val))
@@ -64,8 +68,10 @@
iobuf.out = membuf + IN_SIZE;
iobuf.out_size = OUT_SIZE;
+ if (!crc32_table)
+ crc32_table = crc32_filltable(NULL, /*endian:*/ 0);
+
state = xz_dec_init(64*1024); /* initial dict of 64k */
- xz_crc32_init(state->crc32_table);
while (1) {
enum xz_ret r;
@@ -102,7 +108,7 @@
) {
break;
}
- if (r != XZ_OK) {
+ if (r != XZ_OK && r != XZ_UNSUPPORTED_CHECK) {
bb_error_msg("corrupted data");
total = -1;
break;
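
For reference, the xz_crc32() body elided between "crc = ~crc;" and
"return ~crc;" in the first hunk is the usual table-driven loop over
busybox's shared table; crc32_filltable(NULL, /*endian:*/ 0) allocates
and fills that table when passed NULL, which is what the lazy init
before xz_dec_init() relies on. A sketch of the whole function,
assuming the same static crc32_table pointer:

	static uint32_t xz_crc32(const uint8_t *buf, size_t size, uint32_t crc)
	{
		crc = ~crc;
		while (size != 0) {
			/* reflected CRC-32, little-endian table layout */
			crc = crc32_table[(uint8_t)(crc ^ *buf++)] ^ (crc >> 8);
			size--;
		}
		return ~crc;
	}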