author    dkf <donal.k.fellows@manchester.ac.uk>  2008-12-19 14:44:48 (GMT)
committer dkf <donal.k.fellows@manchester.ac.uk>  2008-12-19 14:44:48 (GMT)
commit    2cd073ec4bf5ea28c2f1d7db65f4ce95cc875611 (patch)
tree      52b7482cbfbaacbd79b4390955254de21b6e7e65 /compat/zlib/contrib/inflate86
parent    bfa6d081abacd1d00e3a459c3d89039849818d6a (diff)
Add previously omitted files so we have a complete zlib 1.2.3 distro...
Diffstat (limited to 'compat/zlib/contrib/inflate86')
-rw-r--r--  compat/zlib/contrib/inflate86/inffas86.c  1157
-rw-r--r--  compat/zlib/contrib/inflate86/inffast.S   1368
2 files changed, 2525 insertions, 0 deletions
diff --git a/compat/zlib/contrib/inflate86/inffas86.c b/compat/zlib/contrib/inflate86/inffas86.c
new file mode 100644
index 0000000..6da7635
--- /dev/null
+++ b/compat/zlib/contrib/inflate86/inffas86.c
@@ -0,0 +1,1157 @@
+/* inffas86.c is a hand tuned assembler version of
+ *
+ * inffast.c -- fast decoding
+ * Copyright (C) 1995-2003 Mark Adler
+ * For conditions of distribution and use, see copyright notice in zlib.h
+ *
+ * Copyright (C) 2003 Chris Anderson <christop@charm.net>
+ * Please use the copyright conditions above.
+ *
+ * Dec-29-2003 -- I added AMD64 inflate asm support. This version is also
+ * slightly quicker on x86 systems because, instead of using rep movsb to copy
+ * data, it uses rep movsw, which moves data in 2-byte chunks instead of single
+ * bytes. I've tested the AMD64 code on a Fedora Core 1 + the x86_64 updates
+ * from http://fedora.linux.duke.edu/fc1_x86_64
+ * which is running on an Athlon 64 3000+ / Gigabyte GA-K8VT800M system with
+ * 1GB ram. The 64-bit version is about 4% faster than the 32-bit version,
+ * when decompressing mozilla-source-1.3.tar.gz.
+ *
+ * Mar-13-2003 -- Most of this is derived from inffast.S which is derived from
+ * the gcc -S output of zlib-1.2.0/inffast.c. Zlib-1.2.0 is in beta release at
+ * the moment. I have successfully compiled and tested this code with gcc2.96,
+ * gcc3.2, icc5.0, msvc6.0. It is very close to the speed of inffast.S
+ * compiled with gcc -DNO_MMX, but inffast.S is still faster on the P3 with MMX
+ * enabled. I will attempt to merge the MMX code into this version. Newer
+ * versions of this and inffast.S can be found at
+ * http://www.eetbeetee.com/zlib/ and http://www.charm.net/~christop/zlib/
+ */
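+
+/* The 2-byte copy described above, restated as a minimal C sketch (this
+ * assumes dist >= 2; a distance of 1 is handled separately as a byte fill):
+ *
+ *     while (len >= 2) {
+ *         out[0] = from[0];  out[1] = from[1];
+ *         out += 2;  from += 2;  len -= 2;
+ *     }
+ *     if (len)
+ *         *out++ = *from++;
+ */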
+
+#include "zutil.h"
+#include "inftrees.h"
+#include "inflate.h"
+#include "inffast.h"
+
+/* Mark Adler's comments from inffast.c: */
+
+/*
+ Decode literal, length, and distance codes and write out the resulting
+ literal and match bytes until either not enough input or output is
+ available, an end-of-block is encountered, or a data error is encountered.
+ When large enough input and output buffers are supplied to inflate(), for
+ example, a 16K input buffer and a 64K output buffer, more than 95% of the
+ inflate execution time is spent in this routine.
+
+ Entry assumptions:
+
+ state->mode == LEN
+ strm->avail_in >= 6
+ strm->avail_out >= 258
+ start >= strm->avail_out
+ state->bits < 8
+
+ On return, state->mode is one of:
+
+ LEN -- ran out of enough output space or enough available input
+ TYPE -- reached end of block code, inflate() to interpret next block
+ BAD -- error in block data
+
+ Notes:
+
+ - The maximum input bits used by a length/distance pair is 15 bits for the
+ length code, 5 bits for the length extra, 15 bits for the distance code,
+ and 13 bits for the distance extra. This totals 48 bits, or six bytes.
+ Therefore if strm->avail_in >= 6, then there is enough input to avoid
+ checking for available input while decoding.
+
+ - The maximum bytes that a single length/distance pair can output is 258
+ bytes, which is the maximum length that can be coded. inflate_fast()
+ requires strm->avail_out >= 258 for each loop to avoid checking for
+ output space.
+ */
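+
+/* The entry assumptions above are the caller's responsibility; inflate()
+ * only dispatches to inflate_fast() when enough input and output remain.
+ * A rough sketch of that guard (not the exact inflate.c source):
+ *
+ *     if (strm->avail_in >= 6 && strm->avail_out >= 258)
+ *         inflate_fast(strm, start);     where start is inflate()'s
+ *                                        starting value of avail_out
+ */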
+void inflate_fast(strm, start)
+z_streamp strm;
+unsigned start; /* inflate()'s starting value for strm->avail_out */
+{
+ struct inflate_state FAR *state;
+ struct inffast_ar {
+/* 64 32 x86 x86_64 */
+/* ar offset register */
+/* 0 0 */ void *esp; /* esp save */
+/* 8 4 */ void *ebp; /* ebp save */
+/* 16 8 */ unsigned char FAR *in; /* esi rsi local strm->next_in */
+/* 24 12 */ unsigned char FAR *last; /* r9 while in < last */
+/* 32 16 */ unsigned char FAR *out; /* edi rdi local strm->next_out */
+/* 40 20 */ unsigned char FAR *beg; /* inflate()'s init next_out */
+/* 48 24 */ unsigned char FAR *end; /* r10 while out < end */
+/* 56 28 */ unsigned char FAR *window;/* sliding window, if wsize!=0 */
+/* 64 32 */ code const FAR *lcode; /* ebp rbp local strm->lencode */
+/* 72 36 */ code const FAR *dcode; /* r11 local strm->distcode */
+/* 80 40 */ unsigned long hold; /* edx rdx local strm->hold */
+/* 88 44 */ unsigned bits; /* ebx rbx local strm->bits */
+/* 92 48 */ unsigned wsize; /* window size */
+/* 96 52 */ unsigned write; /* window write index */
+/*100 56 */ unsigned lmask; /* r12 mask for lcode */
+/*104 60 */ unsigned dmask; /* r13 mask for dcode */
+/*108 64 */ unsigned len; /* r14 match length */
+/*112 68 */ unsigned dist; /* r15 match distance */
+/*116 72 */ unsigned status; /* set when state chng*/
+ } ar;
+
+#if defined( __GNUC__ ) && defined( __amd64__ ) && ! defined( __i386 )
+#define PAD_AVAIL_IN 6
+#define PAD_AVAIL_OUT 258
+#else
+#define PAD_AVAIL_IN 5
+#define PAD_AVAIL_OUT 257
+#endif
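+
+/* These pads set the loop bounds (ar.last and ar.end below) and are added
+ * back when avail_in/avail_out are reconstructed at the end of the function.
+ * A worked example with the 64-bit pads: if the loop stops with ar.in three
+ * bytes short of ar.last, then
+ *
+ *     strm->avail_in = PAD_AVAIL_IN + (ar.last - ar.in) = 6 + 3 = 9
+ */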
+
+ /* copy state to local variables */
+ state = (struct inflate_state FAR *)strm->state;
+ ar.in = strm->next_in;
+ ar.last = ar.in + (strm->avail_in - PAD_AVAIL_IN);
+ ar.out = strm->next_out;
+ ar.beg = ar.out - (start - strm->avail_out);
+ ar.end = ar.out + (strm->avail_out - PAD_AVAIL_OUT);
+ ar.wsize = state->wsize;
+ ar.write = state->write;
+ ar.window = state->window;
+ ar.hold = state->hold;
+ ar.bits = state->bits;
+ ar.lcode = state->lencode;
+ ar.dcode = state->distcode;
+ ar.lmask = (1U << state->lenbits) - 1;
+ ar.dmask = (1U << state->distbits) - 1;
+
+ /* decode literals and length/distances until end-of-block or not enough
+ input data or output space */
+
+ /* align in on 1/2 hold size boundary */
+ while (((unsigned long)(void *)ar.in & (sizeof(ar.hold) / 2 - 1)) != 0) {
+ ar.hold += (unsigned long)*ar.in++ << ar.bits;
+ ar.bits += 8;
+ }
+
+#if defined( __GNUC__ ) && defined( __amd64__ ) && ! defined( __i386 )
+ __asm__ __volatile__ (
+" leaq %0, %%rax\n"
+" movq %%rbp, 8(%%rax)\n" /* save regs rbp and rsp */
+" movq %%rsp, (%%rax)\n"
+" movq %%rax, %%rsp\n" /* make rsp point to &ar */
+" movq 16(%%rsp), %%rsi\n" /* rsi = in */
+" movq 32(%%rsp), %%rdi\n" /* rdi = out */
+" movq 24(%%rsp), %%r9\n" /* r9 = last */
+" movq 48(%%rsp), %%r10\n" /* r10 = end */
+" movq 64(%%rsp), %%rbp\n" /* rbp = lcode */
+" movq 72(%%rsp), %%r11\n" /* r11 = dcode */
+" movq 80(%%rsp), %%rdx\n" /* rdx = hold */
+" movl 88(%%rsp), %%ebx\n" /* ebx = bits */
+" movl 100(%%rsp), %%r12d\n" /* r12d = lmask */
+" movl 104(%%rsp), %%r13d\n" /* r13d = dmask */
+ /* r14d = len */
+ /* r15d = dist */
+" cld\n"
+" cmpq %%rdi, %%r10\n"
+" je .L_one_time\n" /* if only one decode left */
+" cmpq %%rsi, %%r9\n"
+" je .L_one_time\n"
+" jmp .L_do_loop\n"
+
+".L_one_time:\n"
+" movq %%r12, %%r8\n" /* r8 = lmask */
+" cmpb $32, %%bl\n"
+" ja .L_get_length_code_one_time\n"
+
+" lodsl\n" /* eax = *(uint *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $32, %%bl\n" /* bits += 32 */
+" shlq %%cl, %%rax\n"
+" orq %%rax, %%rdx\n" /* hold |= *((uint *)in)++ << bits */
+" jmp .L_get_length_code_one_time\n"
+
+".align 32,0x90\n"
+".L_while_test:\n"
+" cmpq %%rdi, %%r10\n"
+" jbe .L_break_loop\n"
+" cmpq %%rsi, %%r9\n"
+" jbe .L_break_loop\n"
+
+".L_do_loop:\n"
+" movq %%r12, %%r8\n" /* r8 = lmask */
+" cmpb $32, %%bl\n"
+" ja .L_get_length_code\n" /* if (32 < bits) */
+
+" lodsl\n" /* eax = *(uint *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $32, %%bl\n" /* bits += 32 */
+" shlq %%cl, %%rax\n"
+" orq %%rax, %%rdx\n" /* hold |= *((uint *)in)++ << bits */
+
+".L_get_length_code:\n"
+" andq %%rdx, %%r8\n" /* r8 &= hold */
+" movl (%%rbp,%%r8,4), %%eax\n" /* eax = lcode[hold & lmask] */
+
+" movb %%ah, %%cl\n" /* cl = this.bits */
+" subb %%ah, %%bl\n" /* bits -= this.bits */
+" shrq %%cl, %%rdx\n" /* hold >>= this.bits */
+
+" testb %%al, %%al\n"
+" jnz .L_test_for_length_base\n" /* if (op != 0) 45.7% */
+
+" movq %%r12, %%r8\n" /* r8 = lmask */
+" shrl $16, %%eax\n" /* output this.val char */
+" stosb\n"
+
+".L_get_length_code_one_time:\n"
+" andq %%rdx, %%r8\n" /* r8 &= hold */
+" movl (%%rbp,%%r8,4), %%eax\n" /* eax = lcode[hold & lmask] */
+
+".L_dolen:\n"
+" movb %%ah, %%cl\n" /* cl = this.bits */
+" subb %%ah, %%bl\n" /* bits -= this.bits */
+" shrq %%cl, %%rdx\n" /* hold >>= this.bits */
+
+" testb %%al, %%al\n"
+" jnz .L_test_for_length_base\n" /* if (op != 0) 45.7% */
+
+" shrl $16, %%eax\n" /* output this.val char */
+" stosb\n"
+" jmp .L_while_test\n"
+
+".align 32,0x90\n"
+".L_test_for_length_base:\n"
+" movl %%eax, %%r14d\n" /* len = this */
+" shrl $16, %%r14d\n" /* len = this.val */
+" movb %%al, %%cl\n"
+
+" testb $16, %%al\n"
+" jz .L_test_for_second_level_length\n" /* if ((op & 16) == 0) 8% */
+" andb $15, %%cl\n" /* op &= 15 */
+" jz .L_decode_distance\n" /* if (!op) */
+
+".L_add_bits_to_len:\n"
+" subb %%cl, %%bl\n"
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n"
+" andl %%edx, %%eax\n" /* eax &= hold */
+" shrq %%cl, %%rdx\n"
+" addl %%eax, %%r14d\n" /* len += hold & mask[op] */
+
+".L_decode_distance:\n"
+" movq %%r13, %%r8\n" /* r8 = dmask */
+" cmpb $32, %%bl\n"
+" ja .L_get_distance_code\n" /* if (32 < bits) */
+
+" lodsl\n" /* eax = *(uint *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $32, %%bl\n" /* bits += 32 */
+" shlq %%cl, %%rax\n"
+" orq %%rax, %%rdx\n" /* hold |= *((uint *)in)++ << bits */
+
+".L_get_distance_code:\n"
+" andq %%rdx, %%r8\n" /* r8 &= hold */
+" movl (%%r11,%%r8,4), %%eax\n" /* eax = dcode[hold & dmask] */
+
+".L_dodist:\n"
+" movl %%eax, %%r15d\n" /* dist = this */
+" shrl $16, %%r15d\n" /* dist = this.val */
+" movb %%ah, %%cl\n"
+" subb %%ah, %%bl\n" /* bits -= this.bits */
+" shrq %%cl, %%rdx\n" /* hold >>= this.bits */
+" movb %%al, %%cl\n" /* cl = this.op */
+
+" testb $16, %%al\n" /* if ((op & 16) == 0) */
+" jz .L_test_for_second_level_dist\n"
+" andb $15, %%cl\n" /* op &= 15 */
+" jz .L_check_dist_one\n"
+
+".L_add_bits_to_dist:\n"
+" subb %%cl, %%bl\n"
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n" /* (1 << op) - 1 */
+" andl %%edx, %%eax\n" /* eax &= hold */
+" shrq %%cl, %%rdx\n"
+" addl %%eax, %%r15d\n" /* dist += hold & ((1 << op) - 1) */
+
+".L_check_window:\n"
+" movq %%rsi, %%r8\n" /* save in so from can use it's reg */
+" movq %%rdi, %%rax\n"
+" subq 40(%%rsp), %%rax\n" /* nbytes = out - beg */
+
+" cmpl %%r15d, %%eax\n"
+" jb .L_clip_window\n" /* if (dist > nbytes) 4.2% */
+
+" movl %%r14d, %%ecx\n" /* ecx = len */
+" movq %%rdi, %%rsi\n"
+" subq %%r15, %%rsi\n" /* from = out - dist */
+
+" sarl %%ecx\n"
+" jnc .L_copy_two\n" /* if len % 2 == 0 */
+
+" rep movsw\n"
+" movb (%%rsi), %%al\n"
+" movb %%al, (%%rdi)\n"
+" incq %%rdi\n"
+
+" movq %%r8, %%rsi\n" /* move in back to %rsi, toss from */
+" jmp .L_while_test\n"
+
+".L_copy_two:\n"
+" rep movsw\n"
+" movq %%r8, %%rsi\n" /* move in back to %rsi, toss from */
+" jmp .L_while_test\n"
+
+".align 32,0x90\n"
+".L_check_dist_one:\n"
+" cmpl $1, %%r15d\n" /* if dist 1, is a memset */
+" jne .L_check_window\n"
+" cmpq %%rdi, 40(%%rsp)\n" /* if out == beg, outside window */
+" je .L_check_window\n"
+
+" movl %%r14d, %%ecx\n" /* ecx = len */
+" movb -1(%%rdi), %%al\n"
+" movb %%al, %%ah\n"
+
+" sarl %%ecx\n"
+" jnc .L_set_two\n"
+" movb %%al, (%%rdi)\n"
+" incq %%rdi\n"
+
+".L_set_two:\n"
+" rep stosw\n"
+" jmp .L_while_test\n"
+
+".align 32,0x90\n"
+".L_test_for_second_level_length:\n"
+" testb $64, %%al\n"
+" jnz .L_test_for_end_of_block\n" /* if ((op & 64) != 0) */
+
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n"
+" andl %%edx, %%eax\n" /* eax &= hold */
+" addl %%r14d, %%eax\n" /* eax += len */
+" movl (%%rbp,%%rax,4), %%eax\n" /* eax = lcode[val+(hold&mask[op])]*/
+" jmp .L_dolen\n"
+
+".align 32,0x90\n"
+".L_test_for_second_level_dist:\n"
+" testb $64, %%al\n"
+" jnz .L_invalid_distance_code\n" /* if ((op & 64) != 0) */
+
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n"
+" andl %%edx, %%eax\n" /* eax &= hold */
+" addl %%r15d, %%eax\n" /* eax += dist */
+" movl (%%r11,%%rax,4), %%eax\n" /* eax = dcode[val+(hold&mask[op])]*/
+" jmp .L_dodist\n"
+
+".align 32,0x90\n"
+".L_clip_window:\n"
+" movl %%eax, %%ecx\n" /* ecx = nbytes */
+" movl 92(%%rsp), %%eax\n" /* eax = wsize, prepare for dist cmp */
+" negl %%ecx\n" /* nbytes = -nbytes */
+
+" cmpl %%r15d, %%eax\n"
+" jb .L_invalid_distance_too_far\n" /* if (dist > wsize) */
+
+" addl %%r15d, %%ecx\n" /* nbytes = dist - nbytes */
+" cmpl $0, 96(%%rsp)\n"
+" jne .L_wrap_around_window\n" /* if (write != 0) */
+
+" movq 56(%%rsp), %%rsi\n" /* from = window */
+" subl %%ecx, %%eax\n" /* eax -= nbytes */
+" addq %%rax, %%rsi\n" /* from += wsize - nbytes */
+
+" movl %%r14d, %%eax\n" /* eax = len */
+" cmpl %%ecx, %%r14d\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* eax -= nbytes */
+" rep movsb\n"
+" movq %%rdi, %%rsi\n"
+" subq %%r15, %%rsi\n" /* from = &out[ -dist ] */
+" jmp .L_do_copy\n"
+
+".align 32,0x90\n"
+".L_wrap_around_window:\n"
+" movl 96(%%rsp), %%eax\n" /* eax = write */
+" cmpl %%eax, %%ecx\n"
+" jbe .L_contiguous_in_window\n" /* if (write >= nbytes) */
+
+" movl 92(%%rsp), %%esi\n" /* from = wsize */
+" addq 56(%%rsp), %%rsi\n" /* from += window */
+" addq %%rax, %%rsi\n" /* from += write */
+" subq %%rcx, %%rsi\n" /* from -= nbytes */
+" subl %%eax, %%ecx\n" /* nbytes -= write */
+
+" movl %%r14d, %%eax\n" /* eax = len */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movq 56(%%rsp), %%rsi\n" /* from = window */
+" movl 96(%%rsp), %%ecx\n" /* nbytes = write */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movq %%rdi, %%rsi\n"
+" subq %%r15, %%rsi\n" /* from = out - dist */
+" jmp .L_do_copy\n"
+
+".align 32,0x90\n"
+".L_contiguous_in_window:\n"
+" movq 56(%%rsp), %%rsi\n" /* rsi = window */
+" addq %%rax, %%rsi\n"
+" subq %%rcx, %%rsi\n" /* from += write - nbytes */
+
+" movl %%r14d, %%eax\n" /* eax = len */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movq %%rdi, %%rsi\n"
+" subq %%r15, %%rsi\n" /* from = out - dist */
+" jmp .L_do_copy\n" /* if (nbytes >= len) */
+
+".align 32,0x90\n"
+".L_do_copy:\n"
+" movl %%eax, %%ecx\n" /* ecx = len */
+" rep movsb\n"
+
+" movq %%r8, %%rsi\n" /* move in back to %esi, toss from */
+" jmp .L_while_test\n"
+
+".L_test_for_end_of_block:\n"
+" testb $32, %%al\n"
+" jz .L_invalid_literal_length_code\n"
+" movl $1, 116(%%rsp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_invalid_literal_length_code:\n"
+" movl $2, 116(%%rsp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_invalid_distance_code:\n"
+" movl $3, 116(%%rsp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_invalid_distance_too_far:\n"
+" movl $4, 116(%%rsp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_break_loop:\n"
+" movl $0, 116(%%rsp)\n"
+
+".L_break_loop_with_status:\n"
+/* put in, out, bits, and hold back into ar and pop esp */
+" movq %%rsi, 16(%%rsp)\n" /* in */
+" movq %%rdi, 32(%%rsp)\n" /* out */
+" movl %%ebx, 88(%%rsp)\n" /* bits */
+" movq %%rdx, 80(%%rsp)\n" /* hold */
+" movq (%%rsp), %%rax\n" /* restore rbp and rsp */
+" movq 8(%%rsp), %%rbp\n"
+" movq %%rax, %%rsp\n"
+ :
+ : "m" (ar)
+ : "memory", "%rax", "%rbx", "%rcx", "%rdx", "%rsi", "%rdi",
+ "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15"
+ );
+#elif ( defined( __GNUC__ ) || defined( __ICC ) ) && defined( __i386 )
+ __asm__ __volatile__ (
+" leal %0, %%eax\n"
+" movl %%esp, (%%eax)\n" /* save esp, ebp */
+" movl %%ebp, 4(%%eax)\n"
+" movl %%eax, %%esp\n"
+" movl 8(%%esp), %%esi\n" /* esi = in */
+" movl 16(%%esp), %%edi\n" /* edi = out */
+" movl 40(%%esp), %%edx\n" /* edx = hold */
+" movl 44(%%esp), %%ebx\n" /* ebx = bits */
+" movl 32(%%esp), %%ebp\n" /* ebp = lcode */
+
+" cld\n"
+" jmp .L_do_loop\n"
+
+".align 32,0x90\n"
+".L_while_test:\n"
+" cmpl %%edi, 24(%%esp)\n" /* out < end */
+" jbe .L_break_loop\n"
+" cmpl %%esi, 12(%%esp)\n" /* in < last */
+" jbe .L_break_loop\n"
+
+".L_do_loop:\n"
+" cmpb $15, %%bl\n"
+" ja .L_get_length_code\n" /* if (15 < bits) */
+
+" xorl %%eax, %%eax\n"
+" lodsw\n" /* al = *(ushort *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $16, %%bl\n" /* bits += 16 */
+" shll %%cl, %%eax\n"
+" orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+
+".L_get_length_code:\n"
+" movl 56(%%esp), %%eax\n" /* eax = lmask */
+" andl %%edx, %%eax\n" /* eax &= hold */
+" movl (%%ebp,%%eax,4), %%eax\n" /* eax = lcode[hold & lmask] */
+
+".L_dolen:\n"
+" movb %%ah, %%cl\n" /* cl = this.bits */
+" subb %%ah, %%bl\n" /* bits -= this.bits */
+" shrl %%cl, %%edx\n" /* hold >>= this.bits */
+
+" testb %%al, %%al\n"
+" jnz .L_test_for_length_base\n" /* if (op != 0) 45.7% */
+
+" shrl $16, %%eax\n" /* output this.val char */
+" stosb\n"
+" jmp .L_while_test\n"
+
+".align 32,0x90\n"
+".L_test_for_length_base:\n"
+" movl %%eax, %%ecx\n" /* len = this */
+" shrl $16, %%ecx\n" /* len = this.val */
+" movl %%ecx, 64(%%esp)\n" /* save len */
+" movb %%al, %%cl\n"
+
+" testb $16, %%al\n"
+" jz .L_test_for_second_level_length\n" /* if ((op & 16) == 0) 8% */
+" andb $15, %%cl\n" /* op &= 15 */
+" jz .L_decode_distance\n" /* if (!op) */
+" cmpb %%cl, %%bl\n"
+" jae .L_add_bits_to_len\n" /* if (op <= bits) */
+
+" movb %%cl, %%ch\n" /* stash op in ch, freeing cl */
+" xorl %%eax, %%eax\n"
+" lodsw\n" /* al = *(ushort *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $16, %%bl\n" /* bits += 16 */
+" shll %%cl, %%eax\n"
+" orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+" movb %%ch, %%cl\n" /* move op back to ecx */
+
+".L_add_bits_to_len:\n"
+" subb %%cl, %%bl\n"
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n"
+" andl %%edx, %%eax\n" /* eax &= hold */
+" shrl %%cl, %%edx\n"
+" addl %%eax, 64(%%esp)\n" /* len += hold & mask[op] */
+
+".L_decode_distance:\n"
+" cmpb $15, %%bl\n"
+" ja .L_get_distance_code\n" /* if (15 < bits) */
+
+" xorl %%eax, %%eax\n"
+" lodsw\n" /* al = *(ushort *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $16, %%bl\n" /* bits += 16 */
+" shll %%cl, %%eax\n"
+" orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+
+".L_get_distance_code:\n"
+" movl 60(%%esp), %%eax\n" /* eax = dmask */
+" movl 36(%%esp), %%ecx\n" /* ecx = dcode */
+" andl %%edx, %%eax\n" /* eax &= hold */
+" movl (%%ecx,%%eax,4), %%eax\n"/* eax = dcode[hold & dmask] */
+
+".L_dodist:\n"
+" movl %%eax, %%ebp\n" /* dist = this */
+" shrl $16, %%ebp\n" /* dist = this.val */
+" movb %%ah, %%cl\n"
+" subb %%ah, %%bl\n" /* bits -= this.bits */
+" shrl %%cl, %%edx\n" /* hold >>= this.bits */
+" movb %%al, %%cl\n" /* cl = this.op */
+
+" testb $16, %%al\n" /* if ((op & 16) == 0) */
+" jz .L_test_for_second_level_dist\n"
+" andb $15, %%cl\n" /* op &= 15 */
+" jz .L_check_dist_one\n"
+" cmpb %%cl, %%bl\n"
+" jae .L_add_bits_to_dist\n" /* if (op <= bits) 97.6% */
+
+" movb %%cl, %%ch\n" /* stash op in ch, freeing cl */
+" xorl %%eax, %%eax\n"
+" lodsw\n" /* al = *(ushort *)in++ */
+" movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+" addb $16, %%bl\n" /* bits += 16 */
+" shll %%cl, %%eax\n"
+" orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+" movb %%ch, %%cl\n" /* move op back to ecx */
+
+".L_add_bits_to_dist:\n"
+" subb %%cl, %%bl\n"
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n" /* (1 << op) - 1 */
+" andl %%edx, %%eax\n" /* eax &= hold */
+" shrl %%cl, %%edx\n"
+" addl %%eax, %%ebp\n" /* dist += hold & ((1 << op) - 1) */
+
+".L_check_window:\n"
+" movl %%esi, 8(%%esp)\n" /* save in so from can use it's reg */
+" movl %%edi, %%eax\n"
+" subl 20(%%esp), %%eax\n" /* nbytes = out - beg */
+
+" cmpl %%ebp, %%eax\n"
+" jb .L_clip_window\n" /* if (dist > nbytes) 4.2% */
+
+" movl 64(%%esp), %%ecx\n" /* ecx = len */
+" movl %%edi, %%esi\n"
+" subl %%ebp, %%esi\n" /* from = out - dist */
+
+" sarl %%ecx\n"
+" jnc .L_copy_two\n" /* if len % 2 == 0 */
+
+" rep movsw\n"
+" movb (%%esi), %%al\n"
+" movb %%al, (%%edi)\n"
+" incl %%edi\n"
+
+" movl 8(%%esp), %%esi\n" /* move in back to %esi, toss from */
+" movl 32(%%esp), %%ebp\n" /* ebp = lcode */
+" jmp .L_while_test\n"
+
+".L_copy_two:\n"
+" rep movsw\n"
+" movl 8(%%esp), %%esi\n" /* move in back to %esi, toss from */
+" movl 32(%%esp), %%ebp\n" /* ebp = lcode */
+" jmp .L_while_test\n"
+
+".align 32,0x90\n"
+".L_check_dist_one:\n"
+" cmpl $1, %%ebp\n" /* if dist 1, is a memset */
+" jne .L_check_window\n"
+" cmpl %%edi, 20(%%esp)\n"
+" je .L_check_window\n" /* out == beg, if outside window */
+
+" movl 64(%%esp), %%ecx\n" /* ecx = len */
+" movb -1(%%edi), %%al\n"
+" movb %%al, %%ah\n"
+
+" sarl %%ecx\n"
+" jnc .L_set_two\n"
+" movb %%al, (%%edi)\n"
+" incl %%edi\n"
+
+".L_set_two:\n"
+" rep stosw\n"
+" movl 32(%%esp), %%ebp\n" /* ebp = lcode */
+" jmp .L_while_test\n"
+
+".align 32,0x90\n"
+".L_test_for_second_level_length:\n"
+" testb $64, %%al\n"
+" jnz .L_test_for_end_of_block\n" /* if ((op & 64) != 0) */
+
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n"
+" andl %%edx, %%eax\n" /* eax &= hold */
+" addl 64(%%esp), %%eax\n" /* eax += len */
+" movl (%%ebp,%%eax,4), %%eax\n" /* eax = lcode[val+(hold&mask[op])]*/
+" jmp .L_dolen\n"
+
+".align 32,0x90\n"
+".L_test_for_second_level_dist:\n"
+" testb $64, %%al\n"
+" jnz .L_invalid_distance_code\n" /* if ((op & 64) != 0) */
+
+" xorl %%eax, %%eax\n"
+" incl %%eax\n"
+" shll %%cl, %%eax\n"
+" decl %%eax\n"
+" andl %%edx, %%eax\n" /* eax &= hold */
+" addl %%ebp, %%eax\n" /* eax += dist */
+" movl 36(%%esp), %%ecx\n" /* ecx = dcode */
+" movl (%%ecx,%%eax,4), %%eax\n" /* eax = dcode[val+(hold&mask[op])]*/
+" jmp .L_dodist\n"
+
+".align 32,0x90\n"
+".L_clip_window:\n"
+" movl %%eax, %%ecx\n"
+" movl 48(%%esp), %%eax\n" /* eax = wsize */
+" negl %%ecx\n" /* nbytes = -nbytes */
+" movl 28(%%esp), %%esi\n" /* from = window */
+
+" cmpl %%ebp, %%eax\n"
+" jb .L_invalid_distance_too_far\n" /* if (dist > wsize) */
+
+" addl %%ebp, %%ecx\n" /* nbytes = dist - nbytes */
+" cmpl $0, 52(%%esp)\n"
+" jne .L_wrap_around_window\n" /* if (write != 0) */
+
+" subl %%ecx, %%eax\n"
+" addl %%eax, %%esi\n" /* from += wsize - nbytes */
+
+" movl 64(%%esp), %%eax\n" /* eax = len */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movl %%edi, %%esi\n"
+" subl %%ebp, %%esi\n" /* from = out - dist */
+" jmp .L_do_copy\n"
+
+".align 32,0x90\n"
+".L_wrap_around_window:\n"
+" movl 52(%%esp), %%eax\n" /* eax = write */
+" cmpl %%eax, %%ecx\n"
+" jbe .L_contiguous_in_window\n" /* if (write >= nbytes) */
+
+" addl 48(%%esp), %%esi\n" /* from += wsize */
+" addl %%eax, %%esi\n" /* from += write */
+" subl %%ecx, %%esi\n" /* from -= nbytes */
+" subl %%eax, %%ecx\n" /* nbytes -= write */
+
+" movl 64(%%esp), %%eax\n" /* eax = len */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movl 28(%%esp), %%esi\n" /* from = window */
+" movl 52(%%esp), %%ecx\n" /* nbytes = write */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movl %%edi, %%esi\n"
+" subl %%ebp, %%esi\n" /* from = out - dist */
+" jmp .L_do_copy\n"
+
+".align 32,0x90\n"
+".L_contiguous_in_window:\n"
+" addl %%eax, %%esi\n"
+" subl %%ecx, %%esi\n" /* from += write - nbytes */
+
+" movl 64(%%esp), %%eax\n" /* eax = len */
+" cmpl %%ecx, %%eax\n"
+" jbe .L_do_copy\n" /* if (nbytes >= len) */
+
+" subl %%ecx, %%eax\n" /* len -= nbytes */
+" rep movsb\n"
+" movl %%edi, %%esi\n"
+" subl %%ebp, %%esi\n" /* from = out - dist */
+" jmp .L_do_copy\n" /* if (nbytes >= len) */
+
+".align 32,0x90\n"
+".L_do_copy:\n"
+" movl %%eax, %%ecx\n"
+" rep movsb\n"
+
+" movl 8(%%esp), %%esi\n" /* move in back to %esi, toss from */
+" movl 32(%%esp), %%ebp\n" /* ebp = lcode */
+" jmp .L_while_test\n"
+
+".L_test_for_end_of_block:\n"
+" testb $32, %%al\n"
+" jz .L_invalid_literal_length_code\n"
+" movl $1, 72(%%esp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_invalid_literal_length_code:\n"
+" movl $2, 72(%%esp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_invalid_distance_code:\n"
+" movl $3, 72(%%esp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_invalid_distance_too_far:\n"
+" movl 8(%%esp), %%esi\n"
+" movl $4, 72(%%esp)\n"
+" jmp .L_break_loop_with_status\n"
+
+".L_break_loop:\n"
+" movl $0, 72(%%esp)\n"
+
+".L_break_loop_with_status:\n"
+/* put in, out, bits, and hold back into ar and pop esp */
+" movl %%esi, 8(%%esp)\n" /* save in */
+" movl %%edi, 16(%%esp)\n" /* save out */
+" movl %%ebx, 44(%%esp)\n" /* save bits */
+" movl %%edx, 40(%%esp)\n" /* save hold */
+" movl 4(%%esp), %%ebp\n" /* restore esp, ebp */
+" movl (%%esp), %%esp\n"
+ :
+ : "m" (ar)
+ : "memory", "%eax", "%ebx", "%ecx", "%edx", "%esi", "%edi"
+ );
+#elif defined( _MSC_VER ) && ! defined( _M_AMD64 )
+ __asm {
+ lea eax, ar
+ mov [eax], esp /* save esp, ebp */
+ mov [eax+4], ebp
+ mov esp, eax
+ mov esi, [esp+8] /* esi = in */
+ mov edi, [esp+16] /* edi = out */
+ mov edx, [esp+40] /* edx = hold */
+ mov ebx, [esp+44] /* ebx = bits */
+ mov ebp, [esp+32] /* ebp = lcode */
+
+ cld
+ jmp L_do_loop
+
+ALIGN 4
+L_while_test:
+ cmp [esp+24], edi
+ jbe L_break_loop
+ cmp [esp+12], esi
+ jbe L_break_loop
+
+L_do_loop:
+ cmp bl, 15
+ ja L_get_length_code /* if (15 < bits) */
+
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+
+L_get_length_code:
+ mov eax, [esp+56] /* eax = lmask */
+ and eax, edx /* eax &= hold */
+ mov eax, [ebp+eax*4] /* eax = lcode[hold & lmask] */
+
+L_dolen:
+ mov cl, ah /* cl = this.bits */
+ sub bl, ah /* bits -= this.bits */
+ shr edx, cl /* hold >>= this.bits */
+
+ test al, al
+ jnz L_test_for_length_base /* if (op != 0) 45.7% */
+
+ shr eax, 16 /* output this.val char */
+ stosb
+ jmp L_while_test
+
+ALIGN 4
+L_test_for_length_base:
+ mov ecx, eax /* len = this */
+ shr ecx, 16 /* len = this.val */
+ mov [esp+64], ecx /* save len */
+ mov cl, al
+
+ test al, 16
+ jz L_test_for_second_level_length /* if ((op & 16) == 0) 8% */
+ and cl, 15 /* op &= 15 */
+ jz L_decode_distance /* if (!op) */
+ cmp bl, cl
+ jae L_add_bits_to_len /* if (op <= bits) */
+
+ mov ch, cl /* stash op in ch, freeing cl */
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+ mov cl, ch /* move op back to ecx */
+
+L_add_bits_to_len:
+ sub bl, cl
+ xor eax, eax
+ inc eax
+ shl eax, cl
+ dec eax
+ and eax, edx /* eax &= hold */
+ shr edx, cl
+ add [esp+64], eax /* len += hold & mask[op] */
+
+L_decode_distance:
+ cmp bl, 15
+ ja L_get_distance_code /* if (15 < bits) */
+
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+
+L_get_distance_code:
+ mov eax, [esp+60] /* eax = dmask */
+ mov ecx, [esp+36] /* ecx = dcode */
+ and eax, edx /* eax &= hold */
+ mov eax, [ecx+eax*4]/* eax = dcode[hold & dmask] */
+
+L_dodist:
+ mov ebp, eax /* dist = this */
+ shr ebp, 16 /* dist = this.val */
+ mov cl, ah
+ sub bl, ah /* bits -= this.bits */
+ shr edx, cl /* hold >>= this.bits */
+ mov cl, al /* cl = this.op */
+
+ test al, 16 /* if ((op & 16) == 0) */
+ jz L_test_for_second_level_dist
+ and cl, 15 /* op &= 15 */
+ jz L_check_dist_one
+ cmp bl, cl
+ jae L_add_bits_to_dist /* if (op <= bits) 97.6% */
+
+ mov ch, cl /* stash op in ch, freeing cl */
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+ mov cl, ch /* move op back to ecx */
+
+L_add_bits_to_dist:
+ sub bl, cl
+ xor eax, eax
+ inc eax
+ shl eax, cl
+ dec eax /* (1 << op) - 1 */
+ and eax, edx /* eax &= hold */
+ shr edx, cl
+ add ebp, eax /* dist += hold & ((1 << op) - 1) */
+
+L_check_window:
+ mov [esp+8], esi /* save in so from can use its reg */
+ mov eax, edi
+ sub eax, [esp+20] /* nbytes = out - beg */
+
+ cmp eax, ebp
+ jb L_clip_window /* if (dist > nbytes) 4.2% */
+
+ mov ecx, [esp+64] /* ecx = len */
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+
+ sar ecx, 1
+ jnc L_copy_two
+
+ rep movsw
+ mov al, [esi]
+ mov [edi], al
+ inc edi
+
+ mov esi, [esp+8] /* move in back to %esi, toss from */
+ mov ebp, [esp+32] /* ebp = lcode */
+ jmp L_while_test
+
+L_copy_two:
+ rep movsw
+ mov esi, [esp+8] /* move in back to %esi, toss from */
+ mov ebp, [esp+32] /* ebp = lcode */
+ jmp L_while_test
+
+ALIGN 4
+L_check_dist_one:
+ cmp ebp, 1 /* if dist 1, is a memset */
+ jne L_check_window
+ cmp [esp+20], edi
+ je L_check_window /* out == beg, if outside window */
+
+ mov ecx, [esp+64] /* ecx = len */
+ mov al, [edi-1]
+ mov ah, al
+
+ sar ecx, 1
+ jnc L_set_two
+ mov [edi], al /* memset out with from[-1] */
+ inc edi
+
+L_set_two:
+ rep stosw
+ mov ebp, [esp+32] /* ebp = lcode */
+ jmp L_while_test
+
+ALIGN 4
+L_test_for_second_level_length:
+ test al, 64
+ jnz L_test_for_end_of_block /* if ((op & 64) != 0) */
+
+ xor eax, eax
+ inc eax
+ shl eax, cl
+ dec eax
+ and eax, edx /* eax &= hold */
+ add eax, [esp+64] /* eax += len */
+ mov eax, [ebp+eax*4] /* eax = lcode[val+(hold&mask[op])]*/
+ jmp L_dolen
+
+ALIGN 4
+L_test_for_second_level_dist:
+ test al, 64
+ jnz L_invalid_distance_code /* if ((op & 64) != 0) */
+
+ xor eax, eax
+ inc eax
+ shl eax, cl
+ dec eax
+ and eax, edx /* eax &= hold */
+ add eax, ebp /* eax += dist */
+ mov ecx, [esp+36] /* ecx = dcode */
+ mov eax, [ecx+eax*4] /* eax = dcode[val+(hold&mask[op])]*/
+ jmp L_dodist
+
+ALIGN 4
+L_clip_window:
+ mov ecx, eax
+ mov eax, [esp+48] /* eax = wsize */
+ neg ecx /* nbytes = -nbytes */
+ mov esi, [esp+28] /* from = window */
+
+ cmp eax, ebp
+ jb L_invalid_distance_too_far /* if (dist > wsize) */
+
+ add ecx, ebp /* nbytes = dist - nbytes */
+ cmp dword ptr [esp+52], 0
+ jne L_wrap_around_window /* if (write != 0) */
+
+ sub eax, ecx
+ add esi, eax /* from += wsize - nbytes */
+
+ mov eax, [esp+64] /* eax = len */
+ cmp eax, ecx
+ jbe L_do_copy /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+ jmp L_do_copy
+
+ALIGN 4
+L_wrap_around_window:
+ mov eax, [esp+52] /* eax = write */
+ cmp ecx, eax
+ jbe L_contiguous_in_window /* if (write >= nbytes) */
+
+ add esi, [esp+48] /* from += wsize */
+ add esi, eax /* from += write */
+ sub esi, ecx /* from -= nbytes */
+ sub ecx, eax /* nbytes -= write */
+
+ mov eax, [esp+64] /* eax = len */
+ cmp eax, ecx
+ jbe L_do_copy /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, [esp+28] /* from = window */
+ mov ecx, [esp+52] /* nbytes = write */
+ cmp eax, ecx
+ jbe L_do_copy /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+ jmp L_do_copy
+
+ALIGN 4
+L_contiguous_in_window:
+ add esi, eax
+ sub esi, ecx /* from += write - nbytes */
+
+ mov eax, [esp+64] /* eax = len */
+ cmp eax, ecx
+ jbe L_do_copy /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+ jmp L_do_copy
+
+ALIGN 4
+L_do_copy:
+ mov ecx, eax
+ rep movsb
+
+ mov esi, [esp+8] /* move in back to %esi, toss from */
+ mov ebp, [esp+32] /* ebp = lcode */
+ jmp L_while_test
+
+L_test_for_end_of_block:
+ test al, 32
+ jz L_invalid_literal_length_code
+ mov dword ptr [esp+72], 1
+ jmp L_break_loop_with_status
+
+L_invalid_literal_length_code:
+ mov dword ptr [esp+72], 2
+ jmp L_break_loop_with_status
+
+L_invalid_distance_code:
+ mov dword ptr [esp+72], 3
+ jmp L_break_loop_with_status
+
+L_invalid_distance_too_far:
+ mov esi, [esp+4]
+ mov dword ptr [esp+72], 4
+ jmp L_break_loop_with_status
+
+L_break_loop:
+ mov dword ptr [esp+72], 0
+
+L_break_loop_with_status:
+/* put in, out, bits, and hold back into ar and pop esp */
+ mov [esp+8], esi /* save in */
+ mov [esp+16], edi /* save out */
+ mov [esp+44], ebx /* save bits */
+ mov [esp+40], edx /* save hold */
+ mov ebp, [esp+4] /* restore esp, ebp */
+ mov esp, [esp]
+ }
+#else
+#error "x86 architecture not defined"
+#endif
+
+ if (ar.status > 1) {
+ if (ar.status == 2)
+ strm->msg = "invalid literal/length code";
+ else if (ar.status == 3)
+ strm->msg = "invalid distance code";
+ else
+ strm->msg = "invalid distance too far back";
+ state->mode = BAD;
+ }
+ else if ( ar.status == 1 ) {
+ state->mode = TYPE;
+ }
+
+ /* return unused bytes (on entry, bits < 8, so in won't go too far back) */
+ ar.len = ar.bits >> 3;
+ ar.in -= ar.len;
+ ar.bits -= ar.len << 3;
+ ar.hold &= (1U << ar.bits) - 1;
+
+ /* update state and return */
+ strm->next_in = ar.in;
+ strm->next_out = ar.out;
+ strm->avail_in = (unsigned)(ar.in < ar.last ?
+ PAD_AVAIL_IN + (ar.last - ar.in) :
+ PAD_AVAIL_IN - (ar.in - ar.last));
+ strm->avail_out = (unsigned)(ar.out < ar.end ?
+ PAD_AVAIL_OUT + (ar.end - ar.out) :
+ PAD_AVAIL_OUT - (ar.out - ar.end));
+ state->hold = ar.hold;
+ state->bits = ar.bits;
+ return;
+}
+
diff --git a/compat/zlib/contrib/inflate86/inffast.S b/compat/zlib/contrib/inflate86/inffast.S
new file mode 100644
index 0000000..2245a29
--- /dev/null
+++ b/compat/zlib/contrib/inflate86/inffast.S
@@ -0,0 +1,1368 @@
+/*
+ * inffast.S is a hand tuned assembler version of:
+ *
+ * inffast.c -- fast decoding
+ * Copyright (C) 1995-2003 Mark Adler
+ * For conditions of distribution and use, see copyright notice in zlib.h
+ *
+ * Copyright (C) 2003 Chris Anderson <christop@charm.net>
+ * Please use the copyright conditions above.
+ *
+ * This version (Jan-23-2003) of inflate_fast was coded and tested under
+ * GNU/Linux on a Pentium 3, using the gcc-3.2 compiler distribution. On that
+ * machine, I found that gzip style archives decompressed about 20% faster than
+ * the gcc-3.2 -O3 -fomit-frame-pointer compiled version. Your results will
+ * depend on how large a buffer is used for z_stream.next_in & next_out
+ * (8K-32K worked best for my 256K cpu cache) and how much overhead there is in
+ * stream processing I/O and crc32/adler32. In my case, this routine used
+ * 70% of the cpu time and crc32 used 20%.
+ *
+ * I am confident that this version will work in the general case, but I have
+ * not tested a wide variety of datasets or a wide variety of platforms.
+ *
+ * Jan-24-2003 -- Added -DUSE_MMX define for slightly faster inflating.
+ * It should be a runtime flag instead of a compile-time flag...
+ *
+ * Jan-26-2003 -- Added runtime check for MMX support with cpuid instruction.
+ * With -DUSE_MMX, only MMX code is compiled. With -DNO_MMX, only non-MMX code
+ * is compiled. Without either option, runtime detection is enabled. Runtime
+ * detection should work on all modern cpus and the recommended algorithm (flip
+ * ID bit on eflags and then use the cpuid instruction) is used in many
+ * multimedia applications. Tested under win2k with gcc-2.95 and gas-2.12
+ * distributed with cygwin3. Compiling with gcc-2.95 -c inffast.S -o
+ * inffast.obj generates a COFF object which can then be linked with MSVC++
+ * compiled code. Tested under FreeBSD 4.7 with gcc-2.95.
+ *
+ * Jan-28-2003 -- Tested Athlon XP... MMX mode is slower than no MMX (and
+ * slower than compiler generated code). Adjusted cpuid check to use the MMX
+ * code only for Pentiums < P4 until I have more data on the P4. Speed
+ * improvement is only about 15% on the Athlon when compared with code generated
+ * with MSVC++. Not sure yet, but I think the P4 will also be slower using the
+ * MMX mode because many of its x86 ALU instructions execute in .5 cycles and
+ * have less latency than MMX ops. Added code to buffer the last 11 bytes of
+ * the input stream since the MMX code grabs bits in chunks of 32, which
+ * differs from the inffast.c algorithm. I don't think there would have been
+ * read overruns where a page boundary was crossed (a segfault), but there
+ * could have been overruns when next_in ends on unaligned memory (uninitialized
+ * memory read).
+ *
+ * Mar-13-2003 -- P4 MMX is slightly slower than P4 NO_MMX. I created a C
+ * version of the non-MMX code so that it doesn't depend on zstrm and zstate
+ * structure offsets which are hard coded in this file. This was last tested
+ * with zlib-1.2.0 which is currently in beta testing, newer versions of this
+ * and inffas86.c can be found at http://www.eetbeetee.com/zlib/ and
+ * http://www.charm.net/~christop/zlib/
+ */
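+
+/*
+ * The "buffer the last 11 bytes" handling mentioned above, restated as a C
+ * sketch (the corresponding assembly is at .L_align_long / .L_is_aligned
+ * below):
+ *
+ *     if (in >= last) {                        at most 11 input bytes left
+ *         unsigned avail = (last + 11) - in;
+ *         memcpy(buf, in, avail);              copy the tail into a local buf
+ *         memset(buf + avail, 0, 12 - avail);  zero-pad to 12 bytes
+ *         in = buf;
+ *         last = in;                           do just one more iteration
+ *     }
+ */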
+
+
+/*
+ * if you have underscore linking problems (_inflate_fast undefined), try
+ * using -DGAS_COFF
+ */
+#if ! defined( GAS_COFF ) && ! defined( GAS_ELF )
+
+#if defined( WIN32 ) || defined( __CYGWIN__ )
+#define GAS_COFF /* windows object format */
+#else
+#define GAS_ELF
+#endif
+
+#endif /* ! GAS_COFF && ! GAS_ELF */
+
+
+#if defined( GAS_COFF )
+
+/* coff externals have underscores */
+#define inflate_fast _inflate_fast
+#define inflate_fast_use_mmx _inflate_fast_use_mmx
+
+#endif /* GAS_COFF */
+
+
+.file "inffast.S"
+
+.globl inflate_fast
+
+.text
+.align 4,0
+.L_invalid_literal_length_code_msg:
+.string "invalid literal/length code"
+
+.align 4,0
+.L_invalid_distance_code_msg:
+.string "invalid distance code"
+
+.align 4,0
+.L_invalid_distance_too_far_msg:
+.string "invalid distance too far back"
+
+#if ! defined( NO_MMX )
+.align 4,0
+.L_mask: /* mask[N] = ( 1 << N ) - 1 */
+.long 0
+.long 1
+.long 3
+.long 7
+.long 15
+.long 31
+.long 63
+.long 127
+.long 255
+.long 511
+.long 1023
+.long 2047
+.long 4095
+.long 8191
+.long 16383
+.long 32767
+.long 65535
+.long 131071
+.long 262143
+.long 524287
+.long 1048575
+.long 2097151
+.long 4194303
+.long 8388607
+.long 16777215
+.long 33554431
+.long 67108863
+.long 134217727
+.long 268435455
+.long 536870911
+.long 1073741823
+.long 2147483647
+.long 4294967295
+#endif /* NO_MMX */
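+
+/*
+ * Equivalently, the table above can be built at run time; a minimal C sketch
+ * (the last entry needs special care because 1 << 32 overflows a 32-bit type):
+ *
+ *     unsigned long mask[33];
+ *     int n;
+ *     for (n = 0; n < 33; n++)
+ *         mask[n] = (n == 32) ? 0xffffffffUL : (1UL << n) - 1;
+ */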
+
+.text
+
+/*
+ * struct z_stream offsets, in zlib.h
+ */
+#define next_in_strm 0 /* strm->next_in */
+#define avail_in_strm 4 /* strm->avail_in */
+#define next_out_strm 12 /* strm->next_out */
+#define avail_out_strm 16 /* strm->avail_out */
+#define msg_strm 24 /* strm->msg */
+#define state_strm 28 /* strm->state */
+
+/*
+ * struct inflate_state offsets, in inflate.h
+ */
+#define mode_state 0 /* state->mode */
+#define wsize_state 32 /* state->wsize */
+#define write_state 40 /* state->write */
+#define window_state 44 /* state->window */
+#define hold_state 48 /* state->hold */
+#define bits_state 52 /* state->bits */
+#define lencode_state 68 /* state->lencode */
+#define distcode_state 72 /* state->distcode */
+#define lenbits_state 76 /* state->lenbits */
+#define distbits_state 80 /* state->distbits */
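+
+/*
+ * These offsets are hard coded against the zlib 1.2.x structure layouts on a
+ * 32-bit build (see the Mar-13-2003 note above).  A compile-time check along
+ * these lines would catch a mismatch; a sketch only, assuming zlib.h and
+ * inflate.h are included:
+ *
+ *     #include <stddef.h>
+ *     typedef char chk_next_out[offsetof(z_stream, next_out) == 12 ? 1 : -1];
+ *     typedef char chk_hold[offsetof(struct inflate_state, hold) == 48 ? 1 : -1];
+ */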
+
+/*
+ * inflate_fast's activation record
+ */
+#define local_var_size 64 /* how much local space for vars */
+#define strm_sp 88 /* first arg: z_stream * (local_var_size + 24) */
+#define start_sp 92 /* second arg: unsigned int (local_var_size + 28) */
+
+/*
+ * offsets for local vars on stack
+ */
+#define out 60 /* unsigned char* */
+#define window 56 /* unsigned char* */
+#define wsize 52 /* unsigned int */
+#define write 48 /* unsigned int */
+#define in 44 /* unsigned char* */
+#define beg 40 /* unsigned char* */
+#define buf 28 /* char[ 12 ] */
+#define len 24 /* unsigned int */
+#define last 20 /* unsigned char* */
+#define end 16 /* unsigned char* */
+#define dcode 12 /* code* */
+#define lcode 8 /* code* */
+#define dmask 4 /* unsigned int */
+#define lmask 0 /* unsigned int */
+
+/*
+ * typedef enum inflate_mode consts, in inflate.h
+ */
+#define INFLATE_MODE_TYPE 11 /* state->mode flags enum-ed in inflate.h */
+#define INFLATE_MODE_BAD 26
+
+
+#if ! defined( USE_MMX ) && ! defined( NO_MMX )
+
+#define RUN_TIME_MMX
+
+#define CHECK_MMX 1
+#define DO_USE_MMX 2
+#define DONT_USE_MMX 3
+
+.globl inflate_fast_use_mmx
+
+.data
+
+.align 4,0
+inflate_fast_use_mmx: /* integer flag for run time control 1=check,2=mmx,3=no */
+.long CHECK_MMX
+
+#if defined( GAS_ELF )
+/* elf info */
+.type inflate_fast_use_mmx,@object
+.size inflate_fast_use_mmx,4
+#endif
+
+#endif /* RUN_TIME_MMX */
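+
+/*
+ * In C terms the run time selection is a tri-state flag; a sketch, where
+ * cpu_has_mmx() stands in for the cpuid probe coded at .L_check_mmx below:
+ *
+ *     if (inflate_fast_use_mmx == CHECK_MMX)
+ *         inflate_fast_use_mmx = cpu_has_mmx() ? DO_USE_MMX : DONT_USE_MMX;
+ *     if (inflate_fast_use_mmx == DO_USE_MMX)
+ *         ... MMX decode loop ...
+ *     else
+ *         ... non-MMX decode loop ...
+ */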
+
+#if defined( GAS_COFF )
+/* coff info: scl 2 = extern, type 32 = function */
+.def inflate_fast; .scl 2; .type 32; .endef
+#endif
+
+.text
+
+.align 32,0x90
+inflate_fast:
+ pushl %edi
+ pushl %esi
+ pushl %ebp
+ pushl %ebx
+ pushf /* save eflags (strm_sp, state_sp assumes this is 32 bits) */
+ subl $local_var_size, %esp
+ cld
+
+#define strm_r %esi
+#define state_r %edi
+
+ movl strm_sp(%esp), strm_r
+ movl state_strm(strm_r), state_r
+
+ /* in = strm->next_in;
+ * out = strm->next_out;
+ * last = in + strm->avail_in - 11;
+ * beg = out - (start - strm->avail_out);
+ * end = out + (strm->avail_out - 257);
+ */
+ movl avail_in_strm(strm_r), %edx
+ movl next_in_strm(strm_r), %eax
+
+ addl %eax, %edx /* avail_in += next_in */
+ subl $11, %edx /* avail_in -= 11 */
+
+ movl %eax, in(%esp)
+ movl %edx, last(%esp)
+
+ movl start_sp(%esp), %ebp
+ movl avail_out_strm(strm_r), %ecx
+ movl next_out_strm(strm_r), %ebx
+
+ subl %ecx, %ebp /* start -= avail_out */
+ negl %ebp /* start = -start */
+ addl %ebx, %ebp /* start += next_out */
+
+ subl $257, %ecx /* avail_out -= 257 */
+ addl %ebx, %ecx /* avail_out += out */
+
+ movl %ebx, out(%esp)
+ movl %ebp, beg(%esp)
+ movl %ecx, end(%esp)
+
+ /* wsize = state->wsize;
+ * write = state->write;
+ * window = state->window;
+ * hold = state->hold;
+ * bits = state->bits;
+ * lcode = state->lencode;
+ * dcode = state->distcode;
+ * lmask = ( 1 << state->lenbits ) - 1;
+ * dmask = ( 1 << state->distbits ) - 1;
+ */
+
+ movl lencode_state(state_r), %eax
+ movl distcode_state(state_r), %ecx
+
+ movl %eax, lcode(%esp)
+ movl %ecx, dcode(%esp)
+
+ movl $1, %eax
+ movl lenbits_state(state_r), %ecx
+ shll %cl, %eax
+ decl %eax
+ movl %eax, lmask(%esp)
+
+ movl $1, %eax
+ movl distbits_state(state_r), %ecx
+ shll %cl, %eax
+ decl %eax
+ movl %eax, dmask(%esp)
+
+ movl wsize_state(state_r), %eax
+ movl write_state(state_r), %ecx
+ movl window_state(state_r), %edx
+
+ movl %eax, wsize(%esp)
+ movl %ecx, write(%esp)
+ movl %edx, window(%esp)
+
+ movl hold_state(state_r), %ebp
+ movl bits_state(state_r), %ebx
+
+#undef strm_r
+#undef state_r
+
+#define in_r %esi
+#define from_r %esi
+#define out_r %edi
+
+ movl in(%esp), in_r
+ movl last(%esp), %ecx
+ cmpl in_r, %ecx
+ ja .L_align_long /* if in < last */
+
+ addl $11, %ecx /* ecx = &in[ avail_in ] */
+ subl in_r, %ecx /* ecx = avail_in */
+ movl $12, %eax
+ subl %ecx, %eax /* eax = 12 - avail_in */
+ leal buf(%esp), %edi
+ rep movsb /* memcpy( buf, in, avail_in ) */
+ movl %eax, %ecx
+ xorl %eax, %eax
+ rep stosb /* memset( &buf[ avail_in ], 0, 12 - avail_in ) */
+ leal buf(%esp), in_r /* in = buf */
+ movl in_r, last(%esp) /* last = in, do just one iteration */
+ jmp .L_is_aligned
+
+ /* align in_r on long boundary */
+.L_align_long:
+ testl $3, in_r
+ jz .L_is_aligned
+ xorl %eax, %eax
+ movb (in_r), %al
+ incl in_r
+ movl %ebx, %ecx
+ addl $8, %ebx
+ shll %cl, %eax
+ orl %eax, %ebp
+ jmp .L_align_long
+
+.L_is_aligned:
+ movl out(%esp), out_r
+
+#if defined( NO_MMX )
+ jmp .L_do_loop
+#endif
+
+#if defined( USE_MMX )
+ jmp .L_init_mmx
+#endif
+
+/*** Runtime MMX check ***/
+
+#if defined( RUN_TIME_MMX )
+.L_check_mmx:
+ cmpl $DO_USE_MMX, inflate_fast_use_mmx
+ je .L_init_mmx
+ ja .L_do_loop /* > 2 */
+
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ pushf
+ movl (%esp), %eax /* copy eflags to eax */
+ xorl $0x200000, (%esp) /* try toggling ID bit of eflags (bit 21)
+ * to see if cpu supports cpuid...
+ * ID bit method not supported by NexGen but
+ * bios may load a cpuid instruction and
+ * cpuid may be disabled on Cyrix 5-6x86 */
+ popf
+ pushf
+ popl %edx /* copy new eflags to edx */
+ xorl %eax, %edx /* test if ID bit is flipped */
+ jz .L_dont_use_mmx /* not flipped if zero */
+ xorl %eax, %eax
+ cpuid
+ cmpl $0x756e6547, %ebx /* check for GenuineIntel in ebx,ecx,edx */
+ jne .L_dont_use_mmx
+ cmpl $0x6c65746e, %ecx
+ jne .L_dont_use_mmx
+ cmpl $0x49656e69, %edx
+ jne .L_dont_use_mmx
+ movl $1, %eax
+ cpuid /* get cpu features */
+ shrl $8, %eax
+ andl $15, %eax
+ cmpl $6, %eax /* check for Pentium family, is 0xf for P4 */
+ jne .L_dont_use_mmx
+ testl $0x800000, %edx /* test if MMX feature is set (bit 23) */
+ jnz .L_use_mmx
+ jmp .L_dont_use_mmx
+.L_use_mmx:
+ movl $DO_USE_MMX, inflate_fast_use_mmx
+ jmp .L_check_mmx_pop
+.L_dont_use_mmx:
+ movl $DONT_USE_MMX, inflate_fast_use_mmx
+.L_check_mmx_pop:
+ popl %edx
+ popl %ecx
+ popl %ebx
+ popl %eax
+ jmp .L_check_mmx
+#endif
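+
+/*
+ * On a current GCC the same probe can be written in C; a sketch using
+ * <cpuid.h> (note the assembly above additionally limits MMX use to
+ * GenuineIntel family-6 parts):
+ *
+ *     #include <cpuid.h>
+ *     static int cpu_has_mmx(void)
+ *     {
+ *         unsigned int a, b, c, d;
+ *         if (!__get_cpuid(1, &a, &b, &c, &d))
+ *             return 0;
+ *         return (d & bit_MMX) != 0;     MMX is bit 23 of edx
+ *     }
+ */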
+
+
+/*** Non-MMX code ***/
+
+#if defined ( NO_MMX ) || defined( RUN_TIME_MMX )
+
+#define hold_r %ebp
+#define bits_r %bl
+#define bitslong_r %ebx
+
+.align 32,0x90
+.L_while_test:
+ /* while (in < last && out < end)
+ */
+ cmpl out_r, end(%esp)
+ jbe .L_break_loop /* if (out >= end) */
+
+ cmpl in_r, last(%esp)
+ jbe .L_break_loop
+
+.L_do_loop:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out
+ *
+ * do {
+ * if (bits < 15) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * this = lcode[hold & lmask]
+ */
+ cmpb $15, bits_r
+ ja .L_get_length_code /* if (15 < bits) */
+
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+
+.L_get_length_code:
+ movl lmask(%esp), %edx /* edx = lmask */
+ movl lcode(%esp), %ecx /* ecx = lcode */
+ andl hold_r, %edx /* edx &= hold */
+ movl (%ecx,%edx,4), %eax /* eax = lcode[hold & lmask] */
+
+.L_dolen:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out
+ *
+ * dolen:
+ * bits -= this.bits;
+ * hold >>= this.bits
+ */
+ movb %ah, %cl /* cl = this.bits */
+ subb %ah, bits_r /* bits -= this.bits */
+ shrl %cl, hold_r /* hold >>= this.bits */
+
+ /* check if op is a literal
+ * if (op == 0) {
+ * PUP(out) = this.val;
+ * }
+ */
+ testb %al, %al
+ jnz .L_test_for_length_base /* if (op != 0) 45.7% */
+
+ shrl $16, %eax /* output this.val char */
+ stosb
+ jmp .L_while_test
+
+.L_test_for_length_base:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out, %edx = len
+ *
+ * else if (op & 16) {
+ * len = this.val
+ * op &= 15
+ * if (op) {
+ * if (op > bits) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * len += hold & mask[op];
+ * bits -= op;
+ * hold >>= op;
+ * }
+ */
+#define len_r %edx
+ movl %eax, len_r /* len = this */
+ shrl $16, len_r /* len = this.val */
+ movb %al, %cl
+
+ testb $16, %al
+ jz .L_test_for_second_level_length /* if ((op & 16) == 0) 8% */
+ andb $15, %cl /* op &= 15 */
+ jz .L_save_len /* if (!op) */
+ cmpb %cl, bits_r
+ jae .L_add_bits_to_len /* if (op <= bits) */
+
+ movb %cl, %ch /* stash op in ch, freeing cl */
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+ movb %ch, %cl /* move op back to ecx */
+
+.L_add_bits_to_len:
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax
+ subb %cl, bits_r
+ andl hold_r, %eax /* eax &= hold */
+ shrl %cl, hold_r
+ addl %eax, len_r /* len += hold & mask[op] */
+
+.L_save_len:
+ movl len_r, len(%esp) /* save len */
+#undef len_r
+
+.L_decode_distance:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ *
+ * if (bits < 15) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * this = dcode[hold & dmask];
+ * dodist:
+ * bits -= this.bits;
+ * hold >>= this.bits;
+ * op = this.op;
+ */
+
+ cmpb $15, bits_r
+ ja .L_get_distance_code /* if (15 < bits) */
+
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+
+.L_get_distance_code:
+ movl dmask(%esp), %edx /* edx = dmask */
+ movl dcode(%esp), %ecx /* ecx = dcode */
+ andl hold_r, %edx /* edx &= hold */
+ movl (%ecx,%edx,4), %eax /* eax = dcode[hold & dmask] */
+
+#define dist_r %edx
+.L_dodist:
+ movl %eax, dist_r /* dist = this */
+ shrl $16, dist_r /* dist = this.val */
+ movb %ah, %cl
+ subb %ah, bits_r /* bits -= this.bits */
+ shrl %cl, hold_r /* hold >>= this.bits */
+
+ /* if (op & 16) {
+ * dist = this.val
+ * op &= 15
+ * if (op > bits) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * dist += hold & mask[op];
+ * bits -= op;
+ * hold >>= op;
+ */
+ movb %al, %cl /* cl = this.op */
+
+ testb $16, %al /* if ((op & 16) == 0) */
+ jz .L_test_for_second_level_dist
+ andb $15, %cl /* op &= 15 */
+ jz .L_check_dist_one
+ cmpb %cl, bits_r
+ jae .L_add_bits_to_dist /* if (op <= bits) 97.6% */
+
+ movb %cl, %ch /* stash op in ch, freeing cl */
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+ movb %ch, %cl /* move op back to ecx */
+
+.L_add_bits_to_dist:
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax /* (1 << op) - 1 */
+ subb %cl, bits_r
+ andl hold_r, %eax /* eax &= hold */
+ shrl %cl, hold_r
+ addl %eax, dist_r /* dist += hold & ((1 << op) - 1) */
+ jmp .L_check_window
+
+.L_check_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes
+ *
+ * nbytes = out - beg;
+ * if (dist <= nbytes) {
+ * from = out - dist;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--len > 0) {
+ * }
+ */
+
+ movl in_r, in(%esp) /* save in so from can use its reg */
+ movl out_r, %eax
+ subl beg(%esp), %eax /* nbytes = out - beg */
+
+ cmpl dist_r, %eax
+ jb .L_clip_window /* if (dist > nbytes) 4.2% */
+
+ movl len(%esp), %ecx
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+ subl $3, %ecx
+ movb (from_r), %al
+ movb %al, (out_r)
+ movb 1(from_r), %al
+ movb 2(from_r), %dl
+ addl $3, from_r
+ movb %al, 1(out_r)
+ movb %dl, 2(out_r)
+ addl $3, out_r
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ jmp .L_while_test
+
+.align 16,0x90
+.L_check_dist_one:
+ cmpl $1, dist_r
+ jne .L_check_window
+ cmpl out_r, beg(%esp)
+ je .L_check_window
+
+ decl out_r
+ movl len(%esp), %ecx
+ movb (out_r), %al
+ subl $3, %ecx
+
+ movb %al, 1(out_r)
+ movb %al, 2(out_r)
+ movb %al, 3(out_r)
+ addl $4, out_r
+ rep stosb
+
+ jmp .L_while_test
+
+.align 16,0x90
+.L_test_for_second_level_length:
+ /* else if ((op & 64) == 0) {
+ * this = lcode[this.val + (hold & mask[op])];
+ * }
+ */
+ testb $64, %al
+ jnz .L_test_for_end_of_block /* if ((op & 64) != 0) */
+
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax
+ andl hold_r, %eax /* eax &= hold */
+ addl %edx, %eax /* eax += this.val */
+ movl lcode(%esp), %edx /* edx = lcode */
+ movl (%edx,%eax,4), %eax /* eax = lcode[val + (hold&mask[op])] */
+ jmp .L_dolen
+
+.align 16,0x90
+.L_test_for_second_level_dist:
+ /* else if ((op & 64) == 0) {
+ * this = dcode[this.val + (hold & mask[op])];
+ * }
+ */
+ testb $64, %al
+ jnz .L_invalid_distance_code /* if ((op & 64) != 0) */
+
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax
+ andl hold_r, %eax /* eax &= hold */
+ addl %edx, %eax /* eax += this.val */
+ movl dcode(%esp), %edx /* edx = dcode */
+ movl (%edx,%eax,4), %eax /* eax = dcode[val + (hold&mask[op])] */
+ jmp .L_dodist
+
+.align 16,0x90
+.L_clip_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes
+ *
+ * else {
+ * if (dist > wsize) {
+ * invalid distance
+ * }
+ * from = window;
+ * nbytes = dist - nbytes;
+ * if (write == 0) {
+ * from += wsize - nbytes;
+ */
+#define nbytes_r %ecx
+ movl %eax, nbytes_r
+ movl wsize(%esp), %eax /* prepare for dist compare */
+ negl nbytes_r /* nbytes = -nbytes */
+ movl window(%esp), from_r /* from = window */
+
+ cmpl dist_r, %eax
+ jb .L_invalid_distance_too_far /* if (dist > wsize) */
+
+ addl dist_r, nbytes_r /* nbytes = dist - nbytes */
+ cmpl $0, write(%esp)
+ jne .L_wrap_around_window /* if (write != 0) */
+
+ subl nbytes_r, %eax
+ addl %eax, from_r /* from += wsize - nbytes */
+
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes, %eax = len
+ *
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--nbytes);
+ * from = out - dist;
+ * }
+ * }
+ */
+#define len_r %eax
+ movl len(%esp), len_r
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1
+
+.L_wrap_around_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes, %eax = write, %eax = len
+ *
+ * else if (write < nbytes) {
+ * from += wsize + write - nbytes;
+ * nbytes -= write;
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--nbytes);
+ * from = window;
+ * nbytes = write;
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while(--nbytes);
+ * from = out - dist;
+ * }
+ * }
+ * }
+ */
+#define write_r %eax
+ movl write(%esp), write_r
+ cmpl write_r, nbytes_r
+ jbe .L_contiguous_in_window /* if (write >= nbytes) */
+
+ addl wsize(%esp), from_r
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += wsize + write - nbytes */
+ subl write_r, nbytes_r /* nbytes -= write */
+#undef write_r
+
+ movl len(%esp), len_r
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl window(%esp), from_r /* from = window */
+ movl write(%esp), nbytes_r /* nbytes = write */
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1
+
+.L_contiguous_in_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes, %eax = write, %eax = len
+ *
+ * else {
+ * from += write - nbytes;
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--nbytes);
+ * from = out - dist;
+ * }
+ * }
+ */
+#define write_r %eax
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += write - nbytes */
+#undef write_r
+
+ movl len(%esp), len_r
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+.L_do_copy1:
+ /* regs: %esi = from, %esi = in, %ebp = hold, %bl = bits, %edi = out
+ * %eax = len
+ *
+ * while (len > 0) {
+ * PUP(out) = PUP(from);
+ * len--;
+ * }
+ * }
+ * } while (in < last && out < end);
+ */
+#undef nbytes_r
+#define in_r %esi
+ movl len_r, %ecx
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ jmp .L_while_test
+
+#undef len_r
+#undef dist_r
+
+#endif /* NO_MMX || RUN_TIME_MMX */
+
+
+/*** MMX code ***/
+
+#if defined( USE_MMX ) || defined( RUN_TIME_MMX )
+
+.align 32,0x90
+.L_init_mmx:
+ emms
+
+#undef bits_r
+#undef bitslong_r
+#define bitslong_r %ebp
+#define hold_mm %mm0
+ movd %ebp, hold_mm
+ movl %ebx, bitslong_r
+
+#define used_mm %mm1
+#define dmask2_mm %mm2
+#define lmask2_mm %mm3
+#define lmask_mm %mm4
+#define dmask_mm %mm5
+#define tmp_mm %mm6
+
+ movd lmask(%esp), lmask_mm
+ movq lmask_mm, lmask2_mm
+ movd dmask(%esp), dmask_mm
+ movq dmask_mm, dmask2_mm
+ pxor used_mm, used_mm
+ movl lcode(%esp), %ebx /* ebx = lcode */
+ jmp .L_do_loop_mmx
+
+.align 32,0x90
+.L_while_test_mmx:
+ /* while (in < last && out < end)
+ */
+ cmpl out_r, end(%esp)
+ jbe .L_break_loop /* if (out >= end) */
+
+ cmpl in_r, last(%esp)
+ jbe .L_break_loop
+
+.L_do_loop_mmx:
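+ /* Top of the MMX decode loop.  Roughly, in C (same refill idea as
+ * the non-MMX path, but pulling 32 bits of input at a time):
+ *
+ *    hold >>= used;
+ *    if (bits <= 32) {
+ *        hold |= (unsigned long long)(*(unsigned *)in) << bits;
+ *        in += 4;
+ *        bits += 32;
+ *    }
+ */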
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+
+ cmpl $32, bitslong_r
+ ja .L_get_length_code_mmx /* if (32 < bits) */
+
+ movd bitslong_r, tmp_mm
+ movd (in_r), %mm7
+ addl $4, in_r
+ psllq tmp_mm, %mm7
+ addl $32, bitslong_r
+ por %mm7, hold_mm /* hold_mm |= *((uint *)in)++ << bits */
+
+.L_get_length_code_mmx:
+ pand hold_mm, lmask_mm
+ movd lmask_mm, %eax
+ movq lmask2_mm, lmask_mm
+ movl (%ebx,%eax,4), %eax /* eax = lcode[hold & lmask] */
+
+.L_dolen_mmx:
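+ /* %eax holds the fetched code entry: low byte = op, next byte =
+ * this.bits, high 16 bits = this.val.  Roughly:
+ *
+ *    bits -= this.bits;
+ *    if (op == 0)
+ *        *out++ = (unsigned char)this.val;    (literal, via stosb)
+ */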
+ movzbl %ah, %ecx /* ecx = this.bits */
+ movd %ecx, used_mm
+ subl %ecx, bitslong_r /* bits -= this.bits */
+
+ testb %al, %al
+ jnz .L_test_for_length_base_mmx /* if (op != 0) 45.7% */
+
+ shrl $16, %eax /* output this.val char */
+ stosb
+ jmp .L_while_test_mmx
+
+.L_test_for_length_base_mmx:
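+ /* Length code: this.val is the base length and op & 15 the count
+ * of extra length bits.  Note the deferred shift: used_mm holds the
+ * bit count of the previous fetch, and hold is only shifted when
+ * the next bits are actually needed.  Roughly:
+ *
+ *    len = this.val;
+ *    op &= 15;
+ *    if (op) { len += hold & mask[op]; bits -= op; }
+ */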
+#define len_r %edx
+ movl %eax, len_r /* len = this */
+ shrl $16, len_r /* len = this.val */
+
+ testb $16, %al
+ jz .L_test_for_second_level_length_mmx /* if ((op & 16) == 0) 8% */
+ andl $15, %eax /* op &= 15 */
+ jz .L_decode_distance_mmx /* if (!op) */
+
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd %eax, used_mm
+ movd hold_mm, %ecx
+ subl %eax, bitslong_r
+ andl .L_mask(,%eax,4), %ecx
+ addl %ecx, len_r /* len += hold & mask[op] */
+
+.L_decode_distance_mmx:
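+ /* Refill the bit buffer exactly as at .L_do_loop_mmx, then fetch
+ * the distance code entry with dcode[hold & dmask].
+ */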
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+
+ cmpl $32, bitslong_r
+ ja .L_get_dist_code_mmx /* if (32 < bits) */
+
+ movd bitslong_r, tmp_mm
+ movd (in_r), %mm7
+ addl $4, in_r
+ psllq tmp_mm, %mm7
+ addl $32, bitslong_r
+ por %mm7, hold_mm /* hold_mm |= *((uint *)in)++ << bits */
+
+.L_get_dist_code_mmx:
+ movl dcode(%esp), %ebx /* ebx = dcode */
+ pand hold_mm, dmask_mm
+ movd dmask_mm, %eax
+ movq dmask2_mm, dmask_mm
+ movl (%ebx,%eax,4), %eax /* eax = dcode[hold & dmask] */
+
+.L_dodist_mmx:
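+ /* Distance entry decoded the same way: dist = this.val, and
+ * op & 15 gives the number of extra distance bits.  Roughly:
+ *
+ *    dist = this.val;
+ *    bits -= this.bits;
+ *    op &= 15;
+ *    if (op) { dist += hold & mask[op]; bits -= op; }
+ */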
+#define dist_r %ebx
+ movzbl %ah, %ecx /* ecx = this.bits */
+ movl %eax, dist_r
+ shrl $16, dist_r /* dist = this.val */
+ subl %ecx, bitslong_r /* bits -= this.bits */
+ movd %ecx, used_mm
+
+ testb $16, %al /* if ((op & 16) == 0) */
+ jz .L_test_for_second_level_dist_mmx
+ andl $15, %eax /* op &= 15 */
+ jz .L_check_dist_one_mmx
+
+.L_add_bits_to_dist_mmx:
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd %eax, used_mm /* save bit length of current op */
+ movd hold_mm, %ecx /* get the next bits on input stream */
+ subl %eax, bitslong_r /* bits -= op bits */
+ andl .L_mask(,%eax,4), %ecx /* ecx = hold & mask[op] */
+ addl %ecx, dist_r /* dist += hold & mask[op] */
+
+.L_check_window_mmx:
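+ /* Fast path: if the whole match lies in output already written
+ * (dist <= out - beg), copy directly from out - dist.  Three bytes
+ * are moved by hand (deflate's minimum match length) and the rest
+ * with rep movsb; otherwise part of the match is still in the
+ * sliding window and we go to .L_clip_window_mmx.
+ */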
+ movl in_r, in(%esp) /* save in so from can use its reg */
+ movl out_r, %eax
+ subl beg(%esp), %eax /* nbytes = out - beg */
+
+ cmpl dist_r, %eax
+ jb .L_clip_window_mmx /* if (dist > nbytes) 4.2% */
+
+ movl len_r, %ecx
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+ subl $3, %ecx
+ movb (from_r), %al
+ movb %al, (out_r)
+ movb 1(from_r), %al
+ movb 2(from_r), %dl
+ addl $3, from_r
+ movb %al, 1(out_r)
+ movb %dl, 2(out_r)
+ addl $3, out_r
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ movl lcode(%esp), %ebx /* move lcode back to %ebx, toss dist */
+ jmp .L_while_test_mmx
+
+.align 16,0x90
+.L_check_dist_one_mmx:
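+ /* dist == 1 means the match is a run of a single repeated byte
+ * (unless out == beg, in which case the byte must come from the
+ * window), so the byte at out - 1 is replicated with rep stosb
+ * instead of movsb.
+ */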
+ cmpl $1, dist_r
+ jne .L_check_window_mmx
+ cmpl out_r, beg(%esp)
+ je .L_check_window_mmx
+
+ decl out_r
+ movl len_r, %ecx
+ movb (out_r), %al
+ subl $3, %ecx
+
+ movb %al, 1(out_r)
+ movb %al, 2(out_r)
+ movb %al, 3(out_r)
+ addl $4, out_r
+ rep stosb
+
+ movl lcode(%esp), %ebx /* move lcode back to %ebx, toss dist */
+ jmp .L_while_test_mmx
+
+.align 16,0x90
+.L_test_for_second_level_length_mmx:
+ testb $64, %al
+ jnz .L_test_for_end_of_block /* if ((op & 64) != 0) */
+
+ andl $15, %eax
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd hold_mm, %ecx
+ andl .L_mask(,%eax,4), %ecx
+ addl len_r, %ecx
+ movl (%ebx,%ecx,4), %eax /* eax = lcode[len + (hold & mask[op])] */
+ jmp .L_dolen_mmx
+
+.align 16,0x90
+.L_test_for_second_level_dist_mmx:
+ testb $64, %al
+ jnz .L_invalid_distance_code /* if ((op & 64) != 0) */
+
+ andl $15, %eax
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd hold_mm, %ecx
+ andl .L_mask(,%eax,4), %ecx
+ movl dcode(%esp), %eax /* eax = dcode */
+ addl dist_r, %ecx
+ movl (%eax,%ecx,4), %eax /* eax = dcode[dist + (hold & mask[op])] */
+ jmp .L_dodist_mmx
+
+.align 16,0x90
+.L_clip_window_mmx:
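+ /* Part (or all) of the match lies in the sliding window.  On entry
+ * %eax = out - beg (bytes already in this output buffer); compute
+ * how many bytes must come from the window (dist - (out - beg)),
+ * taking the window wrap position (write) into account, as in the
+ * non-MMX clip-window path above.  Here len_r is %edx and dist_r
+ * is %ebx.
+ */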
+#define nbytes_r %ecx
+ movl %eax, nbytes_r
+ movl wsize(%esp), %eax /* prepare for dist compare */
+ negl nbytes_r /* nbytes = -nbytes */
+ movl window(%esp), from_r /* from = window */
+
+ cmpl dist_r, %eax
+ jb .L_invalid_distance_too_far /* if (dist > wsize) */
+
+ addl dist_r, nbytes_r /* nbytes = dist - nbytes */
+ cmpl $0, write(%esp)
+ jne .L_wrap_around_window_mmx /* if (write != 0) */
+
+ subl nbytes_r, %eax
+ addl %eax, from_r /* from += wsize - nbytes */
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1_mmx
+
+.L_wrap_around_window_mmx:
+#define write_r %eax
+ movl write(%esp), write_r
+ cmpl write_r, nbytes_r
+ jbe .L_contiguous_in_window_mmx /* if (write >= nbytes) */
+
+ addl wsize(%esp), from_r
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += wsize + write - nbytes */
+ subl write_r, nbytes_r /* nbytes -= write */
+#undef write_r
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl window(%esp), from_r /* from = window */
+ movl write(%esp), nbytes_r /* nbytes = write */
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1_mmx
+
+.L_contiguous_in_window_mmx:
+#define write_r %eax
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += write - nbytes */
+#undef write_r
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+.L_do_copy1_mmx:
+#undef nbytes_r
+#define in_r %esi
+ movl len_r, %ecx
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ movl lcode(%esp), %ebx /* move lcode back to %ebx, toss dist */
+ jmp .L_while_test_mmx
+
+#undef hold_r
+#undef bitslong_r
+
+#endif /* USE_MMX || RUN_TIME_MMX */
+
+
+/*** USE_MMX, NO_MMX, and RUN_TIME_MMX from here on ***/
+
+.L_invalid_distance_code:
+ /* else {
+ * strm->msg = "invalid distance code";
+ * state->mode = BAD;
+ * }
+ */
+ movl $.L_invalid_distance_code_msg, %ecx
+ movl $INFLATE_MODE_BAD, %edx
+ jmp .L_update_stream_state
+
+.L_test_for_end_of_block:
+ /* else if (op & 32) {
+ * state->mode = TYPE;
+ * break;
+ * }
+ */
+ testb $32, %al
+ jz .L_invalid_literal_length_code /* if ((op & 32) == 0) */
+
+ movl $0, %ecx
+ movl $INFLATE_MODE_TYPE, %edx
+ jmp .L_update_stream_state
+
+.L_invalid_literal_length_code:
+ /* else {
+ * strm->msg = "invalid literal/length code";
+ * state->mode = BAD;
+ * }
+ */
+ movl $.L_invalid_literal_length_code_msg, %ecx
+ movl $INFLATE_MODE_BAD, %edx
+ jmp .L_update_stream_state
+
+.L_invalid_distance_too_far:
+ /* strm->msg = "invalid distance too far back";
+ * state->mode = BAD;
+ */
+ movl in(%esp), in_r /* from_r has in's reg, put in back */
+ movl $.L_invalid_distance_too_far_msg, %ecx
+ movl $INFLATE_MODE_BAD, %edx
+ jmp .L_update_stream_state
+
+.L_update_stream_state:
+ /* set strm->msg = %ecx, strm->state->mode = %edx */
+ movl strm_sp(%esp), %eax
+ testl %ecx, %ecx /* if (msg != NULL) */
+ jz .L_skip_msg
+ movl %ecx, msg_strm(%eax) /* strm->msg = msg */
+.L_skip_msg:
+ movl state_strm(%eax), %eax /* state = strm->state */
+ movl %edx, mode_state(%eax) /* state->mode = edx (BAD | TYPE) */
+ jmp .L_break_loop
+
+.align 32,0x90
+.L_break_loop:
+
+/*
+ * Regs:
+ *
+ * bits = %ebp when mmx, and in %ebx when non-mmx
+ * hold = hold_mm (%mm0) when mmx, and in %ebp when non-mmx
+ * in = %esi
+ * out = %edi
+ */
+
+#if defined( USE_MMX ) || defined( RUN_TIME_MMX )
+
+#if defined( RUN_TIME_MMX )
+
+ cmpl $DO_USE_MMX, inflate_fast_use_mmx
+ jne .L_update_next_in
+
+#endif /* RUN_TIME_MMX */
+
+ movl %ebp, %ebx
+
+.L_update_next_in:
+
+#endif
+
+#define strm_r %eax
+#define state_r %edx
+
+ /* len = bits >> 3;
+ * in -= len;
+ * bits -= len << 3;
+ * hold &= (1U << bits) - 1;
+ * state->hold = hold;
+ * state->bits = bits;
+ * strm->next_in = in;
+ * strm->next_out = out;
+ */
+ movl strm_sp(%esp), strm_r
+ movl %ebx, %ecx
+ movl state_strm(strm_r), state_r
+ shrl $3, %ecx
+ subl %ecx, in_r
+ shll $3, %ecx
+ subl %ecx, %ebx
+ movl out_r, next_out_strm(strm_r)
+ movl %ebx, bits_state(state_r)
+ movl %ebx, %ecx
+
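+ /* If last still points into the local buf on the stack, rebase in
+ * onto strm->next_in and recompute last = &next_in[avail_in - 11],
+ * so that the avail_in fixup below works on the caller's pointer.
+ */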
+ leal buf(%esp), %ebx
+ cmpl %ebx, last(%esp)
+ jne .L_buf_not_used /* if buf != last */
+
+ subl %ebx, in_r /* in -= buf */
+ movl next_in_strm(strm_r), %ebx
+ movl %ebx, last(%esp) /* last = strm->next_in */
+ addl %ebx, in_r /* in += strm->next_in */
+ movl avail_in_strm(strm_r), %ebx
+ subl $11, %ebx
+ addl %ebx, last(%esp) /* last = &strm->next_in[ avail_in - 11 ] */
+
+.L_buf_not_used:
+ movl in_r, next_in_strm(strm_r)
+
+ movl $1, %ebx
+ shll %cl, %ebx
+ decl %ebx /* ebx = (1U << bits) - 1 */
+
+#if defined( USE_MMX ) || defined( RUN_TIME_MMX )
+
+#if defined( RUN_TIME_MMX )
+
+ cmpl $DO_USE_MMX, inflate_fast_use_mmx
+ jne .L_update_hold
+
+#endif /* RUN_TIME_MMX */
+
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd hold_mm, %ebp
+
+ emms
+
+.L_update_hold:
+
+#endif /* USE_MMX || RUN_TIME_MMX */
+
+ andl %ebx, %ebp
+ movl %ebp, hold_state(state_r)
+
+#define last_r %ebx
+
+ /* strm->avail_in = in < last ? 11 + (last - in) : 11 - (in - last) */
+ movl last(%esp), last_r
+ cmpl in_r, last_r
+ jbe .L_last_is_smaller /* if (in >= last) */
+
+ subl in_r, last_r /* last -= in */
+ addl $11, last_r /* last += 11 */
+ movl last_r, avail_in_strm(strm_r)
+ jmp .L_fixup_out
+.L_last_is_smaller:
+ subl last_r, in_r /* in -= last */
+ negl in_r /* in = -in */
+ addl $11, in_r /* in += 11 */
+ movl in_r, avail_in_strm(strm_r)
+
+#undef last_r
+#define end_r %ebx
+
+.L_fixup_out:
+ /* strm->avail_out = out < end ? 257 + (end - out) : 257 - (out - end)*/
+ movl end(%esp), end_r
+ cmpl out_r, end_r
+ jbe .L_end_is_smaller /* if (out >= end) */
+
+ subl out_r, end_r /* end -= out */
+ addl $257, end_r /* end += 257 */
+ movl end_r, avail_out_strm(strm_r)
+ jmp .L_done
+.L_end_is_smaller:
+ subl end_r, out_r /* out -= end */
+ negl out_r /* out = -out */
+ addl $257, out_r /* out += 257 */
+ movl out_r, avail_out_strm(strm_r)
+
+#undef end_r
+#undef strm_r
+#undef state_r
+
+.L_done:
+ addl $local_var_size, %esp
+ popf
+ popl %ebx
+ popl %ebp
+ popl %esi
+ popl %edi
+ ret
+
+#if defined( GAS_ELF )
+/* elf info */
+.type inflate_fast,@function
+.size inflate_fast,.-inflate_fast
+#endif