Diffstat (limited to 'lib/libmd/i386')
 -rw-r--r--  lib/libmd/i386/rmd160.S  2019
 -rw-r--r--  lib/libmd/i386/sha.S     1952

2 files changed, 3971 insertions, 0 deletions
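Both files follow the same layout: a C-preprocessor preamble that selects the object format (the headers say to preprocess with one of ELF, OUT, BSDI or SOL defined, e.g. something like cc -DELF -c), followed by one machine-generated block-transform routine. For orientation, a plausible C-level view of the two entry points is sketched here; the function names are real, but the prototypes are hypothetical, reconstructed from the stack-argument loads in each prologue rather than taken from any header:

/*
 * Hypothetical prototypes (inferred, not from this commit): arg 1 is
 * the five-word chaining state, arg 2 the input buffer, arg 3 a byte
 * count that must be a multiple of 64, since each .L000start
 * iteration consumes one 64-byte input block.
 */
void	ripemd160_block_x86(unsigned int state[5], const void *data,
	    unsigned int nbytes);
void	sha1_block_x86(unsigned int state[5], const void *data,
	    unsigned int nbytes);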
diff --git a/lib/libmd/i386/rmd160.S b/lib/libmd/i386/rmd160.S
new file mode 100644
index 0000000..7ccfb22
--- /dev/null
+++ b/lib/libmd/i386/rmd160.S
@@ -0,0 +1,2019 @@
+/* $FreeBSD$ */
+/* Run the C pre-processor over this file with one of the following defined
+ * ELF - elf object files,
+ * OUT - a.out object files,
+ * BSDI - BSDI style a.out object files
+ * SOL - Solaris style elf
+ */
+
+#ifndef PIC
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#if defined(OUT) || defined(BSDI)
+#define ripemd160_block_x86 _ripemd160_block_x86
+
+#endif
+
+#ifdef OUT
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#define SIZE(a,b)
+#define TYPE(a,b)
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+/* Let the Assembler begin :-) */
+	/* Don't even think of reading this code */
+	/* It was automatically generated by rmd-586.pl */
+	/* Which is a perl program used to generate the x86 assembler for */
+	/* any of elf, a.out, BSDI, Win32, or Solaris */
+	/* eric <eay@cryptsoft.com> */
+
+	.file "rmd-586.s"
+	.version "01.01"
+gcc2_compiled.:
+.text
+	.p2align ALIGN
+.globl ripemd160_block_x86
+	TYPE(ripemd160_block_x86,@function)
+ripemd160_block_x86:
+	pushl %esi
+	movl 16(%esp), %ecx
+	pushl %edi
+	movl 16(%esp), %esi
+	pushl %ebp
+	addl %esi, %ecx
+	pushl %ebx
+	subl $64, %ecx
+	subl $88, %esp
+	movl %ecx, (%esp)
+	movl 108(%esp), %edi
+.L000start:
+
+	movl (%esi), %eax
+	movl 4(%esi), %ebx
+	movl %eax, 4(%esp)
+	movl %ebx, 8(%esp)
+	movl 8(%esi), %eax
+	movl 12(%esi), %ebx
+	movl %eax, 12(%esp)
+	movl %ebx, 16(%esp)
+	movl 16(%esi), %eax
+	movl 20(%esi), %ebx
+	movl %eax, 20(%esp)
+	movl %ebx, 24(%esp)
+	movl 24(%esi), %eax
+	movl 28(%esi), %ebx
+	movl %eax, 28(%esp)
+	movl %ebx, 32(%esp)
+	movl 32(%esi), %eax
+	movl 36(%esi), %ebx
+	movl %eax, 36(%esp)
+	movl %ebx, 40(%esp)
+	movl 40(%esi), %eax
+	movl 44(%esi), %ebx
+	movl %eax, 44(%esp)
+	movl %ebx, 48(%esp)
+	movl 48(%esi), %eax
+	movl 52(%esi), %ebx
+	movl %eax, 52(%esp)
+	movl %ebx, 56(%esp)
+	movl 56(%esi), %eax
+	movl 60(%esi), %ebx
+	movl %eax, 60(%esp)
+	movl %ebx, 64(%esp)
+	addl $64, %esi
+	movl (%edi), %eax
+	movl %esi, 112(%esp)
+	movl 4(%edi), %ebx
+	movl 8(%edi), %ecx
+	movl 12(%edi), %edx
+	movl 16(%edi), %ebp
+	/* 0 */
+	movl %ecx, %esi
+	xorl %edx, %esi
+	movl 4(%esp), %edi
+	xorl %ebx, %esi
+	addl %edi, %eax
+	roll $10, %ecx
+	addl %esi, %eax
+	movl %ebx, %esi
+	roll $11, %eax
+	addl %ebp, %eax
+	/* 1 */
+	xorl %ecx, %esi
+	movl 8(%esp), %edi
+	xorl %eax, %esi
+	addl %esi, %ebp
+	movl %eax, %esi
+	roll $10, %ebx
+	addl %edi, %ebp
+	xorl %ebx, %esi
+	roll $14, %ebp
+	addl %edx, %ebp
+	/* 2 */
+	movl 12(%esp), %edi
+	xorl %ebp, %esi
+	addl %edi, %edx
+	roll $10, %eax
+	addl %esi, %edx
+	movl %ebp, %esi
+	roll $15, %edx
+	addl %ecx, %edx
+	/* 3 */
+	xorl %eax, %esi
+	movl 16(%esp), %edi
+	xorl %edx, %esi
+	addl %esi, %ecx
+	movl %edx, %esi
+	roll $10, %ebp
+	addl %edi, %ecx
+	xorl %ebp, %esi
+	roll $12, %ecx
+	addl %ebx, %ecx
+	/* 4 */
+	movl 20(%esp), %edi
+	xorl %ecx, %esi
+	addl %edi, %ebx
+	roll $10, %edx
+	addl %esi, %ebx
+	movl %ecx, %esi
+	roll $5, %ebx
+	addl %eax, %ebx
+	/* 5 */
+	xorl %edx, %esi
+	movl 24(%esp), %edi
+	xorl %ebx, %esi
+	addl %esi, %eax
+
movl %ebx, %esi + roll $10, %ecx + addl %edi, %eax + xorl %ecx, %esi + roll $8, %eax + addl %ebp, %eax + /* 6 */ + movl 28(%esp), %edi + xorl %eax, %esi + addl %edi, %ebp + roll $10, %ebx + addl %esi, %ebp + movl %eax, %esi + roll $7, %ebp + addl %edx, %ebp + /* 7 */ + xorl %ebx, %esi + movl 32(%esp), %edi + xorl %ebp, %esi + addl %esi, %edx + movl %ebp, %esi + roll $10, %eax + addl %edi, %edx + xorl %eax, %esi + roll $9, %edx + addl %ecx, %edx + /* 8 */ + movl 36(%esp), %edi + xorl %edx, %esi + addl %edi, %ecx + roll $10, %ebp + addl %esi, %ecx + movl %edx, %esi + roll $11, %ecx + addl %ebx, %ecx + /* 9 */ + xorl %ebp, %esi + movl 40(%esp), %edi + xorl %ecx, %esi + addl %esi, %ebx + movl %ecx, %esi + roll $10, %edx + addl %edi, %ebx + xorl %edx, %esi + roll $13, %ebx + addl %eax, %ebx + /* 10 */ + movl 44(%esp), %edi + xorl %ebx, %esi + addl %edi, %eax + roll $10, %ecx + addl %esi, %eax + movl %ebx, %esi + roll $14, %eax + addl %ebp, %eax + /* 11 */ + xorl %ecx, %esi + movl 48(%esp), %edi + xorl %eax, %esi + addl %esi, %ebp + movl %eax, %esi + roll $10, %ebx + addl %edi, %ebp + xorl %ebx, %esi + roll $15, %ebp + addl %edx, %ebp + /* 12 */ + movl 52(%esp), %edi + xorl %ebp, %esi + addl %edi, %edx + roll $10, %eax + addl %esi, %edx + movl %ebp, %esi + roll $6, %edx + addl %ecx, %edx + /* 13 */ + xorl %eax, %esi + movl 56(%esp), %edi + xorl %edx, %esi + addl %esi, %ecx + movl %edx, %esi + roll $10, %ebp + addl %edi, %ecx + xorl %ebp, %esi + roll $7, %ecx + addl %ebx, %ecx + /* 14 */ + movl 60(%esp), %edi + xorl %ecx, %esi + addl %edi, %ebx + roll $10, %edx + addl %esi, %ebx + movl %ecx, %esi + roll $9, %ebx + addl %eax, %ebx + /* 15 */ + xorl %edx, %esi + movl 64(%esp), %edi + xorl %ebx, %esi + addl %esi, %eax + movl $-1, %esi + roll $10, %ecx + addl %edi, %eax + movl 32(%esp), %edi + roll $8, %eax + addl %ebp, %eax + /* 16 */ + addl %edi, %ebp + movl %ebx, %edi + subl %eax, %esi + andl %eax, %edi + andl %ecx, %esi + orl %esi, %edi + movl 20(%esp), %esi + roll $10, %ebx + leal 1518500249(%ebp,%edi,1),%ebp + movl $-1, %edi + roll $7, %ebp + addl %edx, %ebp + /* 17 */ + addl %esi, %edx + movl %eax, %esi + subl %ebp, %edi + andl %ebp, %esi + andl %ebx, %edi + orl %edi, %esi + movl 56(%esp), %edi + roll $10, %eax + leal 1518500249(%edx,%esi,1),%edx + movl $-1, %esi + roll $6, %edx + addl %ecx, %edx + /* 18 */ + addl %edi, %ecx + movl %ebp, %edi + subl %edx, %esi + andl %edx, %edi + andl %eax, %esi + orl %esi, %edi + movl 8(%esp), %esi + roll $10, %ebp + leal 1518500249(%ecx,%edi,1),%ecx + movl $-1, %edi + roll $8, %ecx + addl %ebx, %ecx + /* 19 */ + addl %esi, %ebx + movl %edx, %esi + subl %ecx, %edi + andl %ecx, %esi + andl %ebp, %edi + orl %edi, %esi + movl 44(%esp), %edi + roll $10, %edx + leal 1518500249(%ebx,%esi,1),%ebx + movl $-1, %esi + roll $13, %ebx + addl %eax, %ebx + /* 20 */ + addl %edi, %eax + movl %ecx, %edi + subl %ebx, %esi + andl %ebx, %edi + andl %edx, %esi + orl %esi, %edi + movl 28(%esp), %esi + roll $10, %ecx + leal 1518500249(%eax,%edi,1),%eax + movl $-1, %edi + roll $11, %eax + addl %ebp, %eax + /* 21 */ + addl %esi, %ebp + movl %ebx, %esi + subl %eax, %edi + andl %eax, %esi + andl %ecx, %edi + orl %edi, %esi + movl 64(%esp), %edi + roll $10, %ebx + leal 1518500249(%ebp,%esi,1),%ebp + movl $-1, %esi + roll $9, %ebp + addl %edx, %ebp + /* 22 */ + addl %edi, %edx + movl %eax, %edi + subl %ebp, %esi + andl %ebp, %edi + andl %ebx, %esi + orl %esi, %edi + movl 16(%esp), %esi + roll $10, %eax + leal 1518500249(%edx,%edi,1),%edx + movl $-1, %edi + roll $7, %edx + addl %ecx, %edx 
+ /* 23 */ + addl %esi, %ecx + movl %ebp, %esi + subl %edx, %edi + andl %edx, %esi + andl %eax, %edi + orl %edi, %esi + movl 52(%esp), %edi + roll $10, %ebp + leal 1518500249(%ecx,%esi,1),%ecx + movl $-1, %esi + roll $15, %ecx + addl %ebx, %ecx + /* 24 */ + addl %edi, %ebx + movl %edx, %edi + subl %ecx, %esi + andl %ecx, %edi + andl %ebp, %esi + orl %esi, %edi + movl 4(%esp), %esi + roll $10, %edx + leal 1518500249(%ebx,%edi,1),%ebx + movl $-1, %edi + roll $7, %ebx + addl %eax, %ebx + /* 25 */ + addl %esi, %eax + movl %ecx, %esi + subl %ebx, %edi + andl %ebx, %esi + andl %edx, %edi + orl %edi, %esi + movl 40(%esp), %edi + roll $10, %ecx + leal 1518500249(%eax,%esi,1),%eax + movl $-1, %esi + roll $12, %eax + addl %ebp, %eax + /* 26 */ + addl %edi, %ebp + movl %ebx, %edi + subl %eax, %esi + andl %eax, %edi + andl %ecx, %esi + orl %esi, %edi + movl 24(%esp), %esi + roll $10, %ebx + leal 1518500249(%ebp,%edi,1),%ebp + movl $-1, %edi + roll $15, %ebp + addl %edx, %ebp + /* 27 */ + addl %esi, %edx + movl %eax, %esi + subl %ebp, %edi + andl %ebp, %esi + andl %ebx, %edi + orl %edi, %esi + movl 12(%esp), %edi + roll $10, %eax + leal 1518500249(%edx,%esi,1),%edx + movl $-1, %esi + roll $9, %edx + addl %ecx, %edx + /* 28 */ + addl %edi, %ecx + movl %ebp, %edi + subl %edx, %esi + andl %edx, %edi + andl %eax, %esi + orl %esi, %edi + movl 60(%esp), %esi + roll $10, %ebp + leal 1518500249(%ecx,%edi,1),%ecx + movl $-1, %edi + roll $11, %ecx + addl %ebx, %ecx + /* 29 */ + addl %esi, %ebx + movl %edx, %esi + subl %ecx, %edi + andl %ecx, %esi + andl %ebp, %edi + orl %edi, %esi + movl 48(%esp), %edi + roll $10, %edx + leal 1518500249(%ebx,%esi,1),%ebx + movl $-1, %esi + roll $7, %ebx + addl %eax, %ebx + /* 30 */ + addl %edi, %eax + movl %ecx, %edi + subl %ebx, %esi + andl %ebx, %edi + andl %edx, %esi + orl %esi, %edi + movl 36(%esp), %esi + roll $10, %ecx + leal 1518500249(%eax,%edi,1),%eax + movl $-1, %edi + roll $13, %eax + addl %ebp, %eax + /* 31 */ + addl %esi, %ebp + movl %ebx, %esi + subl %eax, %edi + andl %eax, %esi + andl %ecx, %edi + orl %edi, %esi + movl $-1, %edi + roll $10, %ebx + leal 1518500249(%ebp,%esi,1),%ebp + subl %eax, %edi + roll $12, %ebp + addl %edx, %ebp + /* 32 */ + movl 16(%esp), %esi + orl %ebp, %edi + addl %esi, %edx + xorl %ebx, %edi + movl $-1, %esi + roll $10, %eax + leal 1859775393(%edx,%edi,1),%edx + subl %ebp, %esi + roll $11, %edx + addl %ecx, %edx + /* 33 */ + movl 44(%esp), %edi + orl %edx, %esi + addl %edi, %ecx + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebp + leal 1859775393(%ecx,%esi,1),%ecx + subl %edx, %edi + roll $13, %ecx + addl %ebx, %ecx + /* 34 */ + movl 60(%esp), %esi + orl %ecx, %edi + addl %esi, %ebx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %edx + leal 1859775393(%ebx,%edi,1),%ebx + subl %ecx, %esi + roll $6, %ebx + addl %eax, %ebx + /* 35 */ + movl 20(%esp), %edi + orl %ebx, %esi + addl %edi, %eax + xorl %edx, %esi + movl $-1, %edi + roll $10, %ecx + leal 1859775393(%eax,%esi,1),%eax + subl %ebx, %edi + roll $7, %eax + addl %ebp, %eax + /* 36 */ + movl 40(%esp), %esi + orl %eax, %edi + addl %esi, %ebp + xorl %ecx, %edi + movl $-1, %esi + roll $10, %ebx + leal 1859775393(%ebp,%edi,1),%ebp + subl %eax, %esi + roll $14, %ebp + addl %edx, %ebp + /* 37 */ + movl 64(%esp), %edi + orl %ebp, %esi + addl %edi, %edx + xorl %ebx, %esi + movl $-1, %edi + roll $10, %eax + leal 1859775393(%edx,%esi,1),%edx + subl %ebp, %edi + roll $9, %edx + addl %ecx, %edx + /* 38 */ + movl 36(%esp), %esi + orl %edx, %edi + addl %esi, %ecx + xorl %eax, %edi + movl $-1, %esi + 
roll $10, %ebp + leal 1859775393(%ecx,%edi,1),%ecx + subl %edx, %esi + roll $13, %ecx + addl %ebx, %ecx + /* 39 */ + movl 8(%esp), %edi + orl %ecx, %esi + addl %edi, %ebx + xorl %ebp, %esi + movl $-1, %edi + roll $10, %edx + leal 1859775393(%ebx,%esi,1),%ebx + subl %ecx, %edi + roll $15, %ebx + addl %eax, %ebx + /* 40 */ + movl 12(%esp), %esi + orl %ebx, %edi + addl %esi, %eax + xorl %edx, %edi + movl $-1, %esi + roll $10, %ecx + leal 1859775393(%eax,%edi,1),%eax + subl %ebx, %esi + roll $14, %eax + addl %ebp, %eax + /* 41 */ + movl 32(%esp), %edi + orl %eax, %esi + addl %edi, %ebp + xorl %ecx, %esi + movl $-1, %edi + roll $10, %ebx + leal 1859775393(%ebp,%esi,1),%ebp + subl %eax, %edi + roll $8, %ebp + addl %edx, %ebp + /* 42 */ + movl 4(%esp), %esi + orl %ebp, %edi + addl %esi, %edx + xorl %ebx, %edi + movl $-1, %esi + roll $10, %eax + leal 1859775393(%edx,%edi,1),%edx + subl %ebp, %esi + roll $13, %edx + addl %ecx, %edx + /* 43 */ + movl 28(%esp), %edi + orl %edx, %esi + addl %edi, %ecx + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebp + leal 1859775393(%ecx,%esi,1),%ecx + subl %edx, %edi + roll $6, %ecx + addl %ebx, %ecx + /* 44 */ + movl 56(%esp), %esi + orl %ecx, %edi + addl %esi, %ebx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %edx + leal 1859775393(%ebx,%edi,1),%ebx + subl %ecx, %esi + roll $5, %ebx + addl %eax, %ebx + /* 45 */ + movl 48(%esp), %edi + orl %ebx, %esi + addl %edi, %eax + xorl %edx, %esi + movl $-1, %edi + roll $10, %ecx + leal 1859775393(%eax,%esi,1),%eax + subl %ebx, %edi + roll $12, %eax + addl %ebp, %eax + /* 46 */ + movl 24(%esp), %esi + orl %eax, %edi + addl %esi, %ebp + xorl %ecx, %edi + movl $-1, %esi + roll $10, %ebx + leal 1859775393(%ebp,%edi,1),%ebp + subl %eax, %esi + roll $7, %ebp + addl %edx, %ebp + /* 47 */ + movl 52(%esp), %edi + orl %ebp, %esi + addl %edi, %edx + xorl %ebx, %esi + movl $-1, %edi + roll $10, %eax + leal 1859775393(%edx,%esi,1),%edx + movl %eax, %esi + roll $5, %edx + addl %ecx, %edx + /* 48 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 8(%esp), %esi + roll $10, %ebp + leal 2400959708(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + movl %ebp, %esi + roll $11, %ecx + addl %ebx, %ecx + /* 49 */ + subl %ebp, %edi + andl %ecx, %esi + andl %edx, %edi + orl %esi, %edi + movl 40(%esp), %esi + roll $10, %edx + leal 2400959708(%ebx,%edi,),%ebx + movl $-1, %edi + addl %esi, %ebx + movl %edx, %esi + roll $12, %ebx + addl %eax, %ebx + /* 50 */ + subl %edx, %edi + andl %ebx, %esi + andl %ecx, %edi + orl %esi, %edi + movl 48(%esp), %esi + roll $10, %ecx + leal 2400959708(%eax,%edi,),%eax + movl $-1, %edi + addl %esi, %eax + movl %ecx, %esi + roll $14, %eax + addl %ebp, %eax + /* 51 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 44(%esp), %esi + roll $10, %ebx + leal 2400959708(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + movl %ebx, %esi + roll $15, %ebp + addl %edx, %ebp + /* 52 */ + subl %ebx, %edi + andl %ebp, %esi + andl %eax, %edi + orl %esi, %edi + movl 4(%esp), %esi + roll $10, %eax + leal 2400959708(%edx,%edi,),%edx + movl $-1, %edi + addl %esi, %edx + movl %eax, %esi + roll $14, %edx + addl %ecx, %edx + /* 53 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 36(%esp), %esi + roll $10, %ebp + leal 2400959708(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + movl %ebp, %esi + roll $15, %ecx + addl %ebx, %ecx + /* 54 */ + subl %ebp, %edi + andl %ecx, %esi + andl %edx, %edi + orl %esi, %edi + movl 52(%esp), %esi + roll 
$10, %edx + leal 2400959708(%ebx,%edi,),%ebx + movl $-1, %edi + addl %esi, %ebx + movl %edx, %esi + roll $9, %ebx + addl %eax, %ebx + /* 55 */ + subl %edx, %edi + andl %ebx, %esi + andl %ecx, %edi + orl %esi, %edi + movl 20(%esp), %esi + roll $10, %ecx + leal 2400959708(%eax,%edi,),%eax + movl $-1, %edi + addl %esi, %eax + movl %ecx, %esi + roll $8, %eax + addl %ebp, %eax + /* 56 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 56(%esp), %esi + roll $10, %ebx + leal 2400959708(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + movl %ebx, %esi + roll $9, %ebp + addl %edx, %ebp + /* 57 */ + subl %ebx, %edi + andl %ebp, %esi + andl %eax, %edi + orl %esi, %edi + movl 16(%esp), %esi + roll $10, %eax + leal 2400959708(%edx,%edi,),%edx + movl $-1, %edi + addl %esi, %edx + movl %eax, %esi + roll $14, %edx + addl %ecx, %edx + /* 58 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 32(%esp), %esi + roll $10, %ebp + leal 2400959708(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + movl %ebp, %esi + roll $5, %ecx + addl %ebx, %ecx + /* 59 */ + subl %ebp, %edi + andl %ecx, %esi + andl %edx, %edi + orl %esi, %edi + movl 64(%esp), %esi + roll $10, %edx + leal 2400959708(%ebx,%edi,),%ebx + movl $-1, %edi + addl %esi, %ebx + movl %edx, %esi + roll $6, %ebx + addl %eax, %ebx + /* 60 */ + subl %edx, %edi + andl %ebx, %esi + andl %ecx, %edi + orl %esi, %edi + movl 60(%esp), %esi + roll $10, %ecx + leal 2400959708(%eax,%edi,),%eax + movl $-1, %edi + addl %esi, %eax + movl %ecx, %esi + roll $8, %eax + addl %ebp, %eax + /* 61 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 24(%esp), %esi + roll $10, %ebx + leal 2400959708(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + movl %ebx, %esi + roll $6, %ebp + addl %edx, %ebp + /* 62 */ + subl %ebx, %edi + andl %ebp, %esi + andl %eax, %edi + orl %esi, %edi + movl 28(%esp), %esi + roll $10, %eax + leal 2400959708(%edx,%edi,),%edx + movl $-1, %edi + addl %esi, %edx + movl %eax, %esi + roll $5, %edx + addl %ecx, %edx + /* 63 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 12(%esp), %esi + roll $10, %ebp + leal 2400959708(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + subl %ebp, %edi + roll $12, %ecx + addl %ebx, %ecx + /* 64 */ + movl 20(%esp), %esi + orl %edx, %edi + addl %esi, %ebx + xorl %ecx, %edi + movl $-1, %esi + roll $10, %edx + leal 2840853838(%ebx,%edi,1),%ebx + subl %edx, %esi + roll $9, %ebx + addl %eax, %ebx + /* 65 */ + movl 4(%esp), %edi + orl %ecx, %esi + addl %edi, %eax + xorl %ebx, %esi + movl $-1, %edi + roll $10, %ecx + leal 2840853838(%eax,%esi,1),%eax + subl %ecx, %edi + roll $15, %eax + addl %ebp, %eax + /* 66 */ + movl 24(%esp), %esi + orl %ebx, %edi + addl %esi, %ebp + xorl %eax, %edi + movl $-1, %esi + roll $10, %ebx + leal 2840853838(%ebp,%edi,1),%ebp + subl %ebx, %esi + roll $5, %ebp + addl %edx, %ebp + /* 67 */ + movl 40(%esp), %edi + orl %eax, %esi + addl %edi, %edx + xorl %ebp, %esi + movl $-1, %edi + roll $10, %eax + leal 2840853838(%edx,%esi,1),%edx + subl %eax, %edi + roll $11, %edx + addl %ecx, %edx + /* 68 */ + movl 32(%esp), %esi + orl %ebp, %edi + addl %esi, %ecx + xorl %edx, %edi + movl $-1, %esi + roll $10, %ebp + leal 2840853838(%ecx,%edi,1),%ecx + subl %ebp, %esi + roll $6, %ecx + addl %ebx, %ecx + /* 69 */ + movl 52(%esp), %edi + orl %edx, %esi + addl %edi, %ebx + xorl %ecx, %esi + movl $-1, %edi + roll $10, %edx + leal 2840853838(%ebx,%esi,1),%ebx + subl %edx, %edi + roll $8, %ebx + 
addl %eax, %ebx + /* 70 */ + movl 12(%esp), %esi + orl %ecx, %edi + addl %esi, %eax + xorl %ebx, %edi + movl $-1, %esi + roll $10, %ecx + leal 2840853838(%eax,%edi,1),%eax + subl %ecx, %esi + roll $13, %eax + addl %ebp, %eax + /* 71 */ + movl 44(%esp), %edi + orl %ebx, %esi + addl %edi, %ebp + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebx + leal 2840853838(%ebp,%esi,1),%ebp + subl %ebx, %edi + roll $12, %ebp + addl %edx, %ebp + /* 72 */ + movl 60(%esp), %esi + orl %eax, %edi + addl %esi, %edx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %eax + leal 2840853838(%edx,%edi,1),%edx + subl %eax, %esi + roll $5, %edx + addl %ecx, %edx + /* 73 */ + movl 8(%esp), %edi + orl %ebp, %esi + addl %edi, %ecx + xorl %edx, %esi + movl $-1, %edi + roll $10, %ebp + leal 2840853838(%ecx,%esi,1),%ecx + subl %ebp, %edi + roll $12, %ecx + addl %ebx, %ecx + /* 74 */ + movl 16(%esp), %esi + orl %edx, %edi + addl %esi, %ebx + xorl %ecx, %edi + movl $-1, %esi + roll $10, %edx + leal 2840853838(%ebx,%edi,1),%ebx + subl %edx, %esi + roll $13, %ebx + addl %eax, %ebx + /* 75 */ + movl 36(%esp), %edi + orl %ecx, %esi + addl %edi, %eax + xorl %ebx, %esi + movl $-1, %edi + roll $10, %ecx + leal 2840853838(%eax,%esi,1),%eax + subl %ecx, %edi + roll $14, %eax + addl %ebp, %eax + /* 76 */ + movl 48(%esp), %esi + orl %ebx, %edi + addl %esi, %ebp + xorl %eax, %edi + movl $-1, %esi + roll $10, %ebx + leal 2840853838(%ebp,%edi,1),%ebp + subl %ebx, %esi + roll $11, %ebp + addl %edx, %ebp + /* 77 */ + movl 28(%esp), %edi + orl %eax, %esi + addl %edi, %edx + xorl %ebp, %esi + movl $-1, %edi + roll $10, %eax + leal 2840853838(%edx,%esi,1),%edx + subl %eax, %edi + roll $8, %edx + addl %ecx, %edx + /* 78 */ + movl 64(%esp), %esi + orl %ebp, %edi + addl %esi, %ecx + xorl %edx, %edi + movl $-1, %esi + roll $10, %ebp + leal 2840853838(%ecx,%edi,1),%ecx + subl %ebp, %esi + roll $5, %ecx + addl %ebx, %ecx + /* 79 */ + movl 56(%esp), %edi + orl %edx, %esi + addl %edi, %ebx + xorl %ecx, %esi + movl 108(%esp), %edi + roll $10, %edx + leal 2840853838(%ebx,%esi,1),%ebx + movl %eax, 68(%esp) + roll $6, %ebx + addl %eax, %ebx + movl (%edi), %eax + movl %ebx, 72(%esp) + movl %ecx, 76(%esp) + movl 4(%edi), %ebx + movl %edx, 80(%esp) + movl 8(%edi), %ecx + movl %ebp, 84(%esp) + movl 12(%edi), %edx + movl 16(%edi), %ebp + /* 80 */ + movl $-1, %edi + subl %edx, %edi + movl 24(%esp), %esi + orl %ecx, %edi + addl %esi, %eax + xorl %ebx, %edi + movl $-1, %esi + roll $10, %ecx + leal 1352829926(%eax,%edi,1),%eax + subl %ecx, %esi + roll $8, %eax + addl %ebp, %eax + /* 81 */ + movl 60(%esp), %edi + orl %ebx, %esi + addl %edi, %ebp + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebx + leal 1352829926(%ebp,%esi,1),%ebp + subl %ebx, %edi + roll $9, %ebp + addl %edx, %ebp + /* 82 */ + movl 32(%esp), %esi + orl %eax, %edi + addl %esi, %edx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %eax + leal 1352829926(%edx,%edi,1),%edx + subl %eax, %esi + roll $9, %edx + addl %ecx, %edx + /* 83 */ + movl 4(%esp), %edi + orl %ebp, %esi + addl %edi, %ecx + xorl %edx, %esi + movl $-1, %edi + roll $10, %ebp + leal 1352829926(%ecx,%esi,1),%ecx + subl %ebp, %edi + roll $11, %ecx + addl %ebx, %ecx + /* 84 */ + movl 40(%esp), %esi + orl %edx, %edi + addl %esi, %ebx + xorl %ecx, %edi + movl $-1, %esi + roll $10, %edx + leal 1352829926(%ebx,%edi,1),%ebx + subl %edx, %esi + roll $13, %ebx + addl %eax, %ebx + /* 85 */ + movl 12(%esp), %edi + orl %ecx, %esi + addl %edi, %eax + xorl %ebx, %esi + movl $-1, %edi + roll $10, %ecx + leal 1352829926(%eax,%esi,1),%eax + subl %ecx, %edi 
+ roll $15, %eax + addl %ebp, %eax + /* 86 */ + movl 48(%esp), %esi + orl %ebx, %edi + addl %esi, %ebp + xorl %eax, %edi + movl $-1, %esi + roll $10, %ebx + leal 1352829926(%ebp,%edi,1),%ebp + subl %ebx, %esi + roll $15, %ebp + addl %edx, %ebp + /* 87 */ + movl 20(%esp), %edi + orl %eax, %esi + addl %edi, %edx + xorl %ebp, %esi + movl $-1, %edi + roll $10, %eax + leal 1352829926(%edx,%esi,1),%edx + subl %eax, %edi + roll $5, %edx + addl %ecx, %edx + /* 88 */ + movl 56(%esp), %esi + orl %ebp, %edi + addl %esi, %ecx + xorl %edx, %edi + movl $-1, %esi + roll $10, %ebp + leal 1352829926(%ecx,%edi,1),%ecx + subl %ebp, %esi + roll $7, %ecx + addl %ebx, %ecx + /* 89 */ + movl 28(%esp), %edi + orl %edx, %esi + addl %edi, %ebx + xorl %ecx, %esi + movl $-1, %edi + roll $10, %edx + leal 1352829926(%ebx,%esi,1),%ebx + subl %edx, %edi + roll $7, %ebx + addl %eax, %ebx + /* 90 */ + movl 64(%esp), %esi + orl %ecx, %edi + addl %esi, %eax + xorl %ebx, %edi + movl $-1, %esi + roll $10, %ecx + leal 1352829926(%eax,%edi,1),%eax + subl %ecx, %esi + roll $8, %eax + addl %ebp, %eax + /* 91 */ + movl 36(%esp), %edi + orl %ebx, %esi + addl %edi, %ebp + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebx + leal 1352829926(%ebp,%esi,1),%ebp + subl %ebx, %edi + roll $11, %ebp + addl %edx, %ebp + /* 92 */ + movl 8(%esp), %esi + orl %eax, %edi + addl %esi, %edx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %eax + leal 1352829926(%edx,%edi,1),%edx + subl %eax, %esi + roll $14, %edx + addl %ecx, %edx + /* 93 */ + movl 44(%esp), %edi + orl %ebp, %esi + addl %edi, %ecx + xorl %edx, %esi + movl $-1, %edi + roll $10, %ebp + leal 1352829926(%ecx,%esi,1),%ecx + subl %ebp, %edi + roll $14, %ecx + addl %ebx, %ecx + /* 94 */ + movl 16(%esp), %esi + orl %edx, %edi + addl %esi, %ebx + xorl %ecx, %edi + movl $-1, %esi + roll $10, %edx + leal 1352829926(%ebx,%edi,1),%ebx + subl %edx, %esi + roll $12, %ebx + addl %eax, %ebx + /* 95 */ + movl 52(%esp), %edi + orl %ecx, %esi + addl %edi, %eax + xorl %ebx, %esi + movl $-1, %edi + roll $10, %ecx + leal 1352829926(%eax,%esi,1),%eax + movl %ecx, %esi + roll $6, %eax + addl %ebp, %eax + /* 96 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 28(%esp), %esi + roll $10, %ebx + leal 1548603684(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + movl %ebx, %esi + roll $9, %ebp + addl %edx, %ebp + /* 97 */ + subl %ebx, %edi + andl %ebp, %esi + andl %eax, %edi + orl %esi, %edi + movl 48(%esp), %esi + roll $10, %eax + leal 1548603684(%edx,%edi,),%edx + movl $-1, %edi + addl %esi, %edx + movl %eax, %esi + roll $13, %edx + addl %ecx, %edx + /* 98 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 16(%esp), %esi + roll $10, %ebp + leal 1548603684(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + movl %ebp, %esi + roll $15, %ecx + addl %ebx, %ecx + /* 99 */ + subl %ebp, %edi + andl %ecx, %esi + andl %edx, %edi + orl %esi, %edi + movl 32(%esp), %esi + roll $10, %edx + leal 1548603684(%ebx,%edi,),%ebx + movl $-1, %edi + addl %esi, %ebx + movl %edx, %esi + roll $7, %ebx + addl %eax, %ebx + /* 100 */ + subl %edx, %edi + andl %ebx, %esi + andl %ecx, %edi + orl %esi, %edi + movl 4(%esp), %esi + roll $10, %ecx + leal 1548603684(%eax,%edi,),%eax + movl $-1, %edi + addl %esi, %eax + movl %ecx, %esi + roll $12, %eax + addl %ebp, %eax + /* 101 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 56(%esp), %esi + roll $10, %ebx + leal 1548603684(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + movl %ebx, %esi + roll 
$8, %ebp + addl %edx, %ebp + /* 102 */ + subl %ebx, %edi + andl %ebp, %esi + andl %eax, %edi + orl %esi, %edi + movl 24(%esp), %esi + roll $10, %eax + leal 1548603684(%edx,%edi,),%edx + movl $-1, %edi + addl %esi, %edx + movl %eax, %esi + roll $9, %edx + addl %ecx, %edx + /* 103 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 44(%esp), %esi + roll $10, %ebp + leal 1548603684(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + movl %ebp, %esi + roll $11, %ecx + addl %ebx, %ecx + /* 104 */ + subl %ebp, %edi + andl %ecx, %esi + andl %edx, %edi + orl %esi, %edi + movl 60(%esp), %esi + roll $10, %edx + leal 1548603684(%ebx,%edi,),%ebx + movl $-1, %edi + addl %esi, %ebx + movl %edx, %esi + roll $7, %ebx + addl %eax, %ebx + /* 105 */ + subl %edx, %edi + andl %ebx, %esi + andl %ecx, %edi + orl %esi, %edi + movl 64(%esp), %esi + roll $10, %ecx + leal 1548603684(%eax,%edi,),%eax + movl $-1, %edi + addl %esi, %eax + movl %ecx, %esi + roll $7, %eax + addl %ebp, %eax + /* 106 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 36(%esp), %esi + roll $10, %ebx + leal 1548603684(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + movl %ebx, %esi + roll $12, %ebp + addl %edx, %ebp + /* 107 */ + subl %ebx, %edi + andl %ebp, %esi + andl %eax, %edi + orl %esi, %edi + movl 52(%esp), %esi + roll $10, %eax + leal 1548603684(%edx,%edi,),%edx + movl $-1, %edi + addl %esi, %edx + movl %eax, %esi + roll $7, %edx + addl %ecx, %edx + /* 108 */ + subl %eax, %edi + andl %edx, %esi + andl %ebp, %edi + orl %esi, %edi + movl 20(%esp), %esi + roll $10, %ebp + leal 1548603684(%ecx,%edi,),%ecx + movl $-1, %edi + addl %esi, %ecx + movl %ebp, %esi + roll $6, %ecx + addl %ebx, %ecx + /* 109 */ + subl %ebp, %edi + andl %ecx, %esi + andl %edx, %edi + orl %esi, %edi + movl 40(%esp), %esi + roll $10, %edx + leal 1548603684(%ebx,%edi,),%ebx + movl $-1, %edi + addl %esi, %ebx + movl %edx, %esi + roll $15, %ebx + addl %eax, %ebx + /* 110 */ + subl %edx, %edi + andl %ebx, %esi + andl %ecx, %edi + orl %esi, %edi + movl 8(%esp), %esi + roll $10, %ecx + leal 1548603684(%eax,%edi,),%eax + movl $-1, %edi + addl %esi, %eax + movl %ecx, %esi + roll $13, %eax + addl %ebp, %eax + /* 111 */ + subl %ecx, %edi + andl %eax, %esi + andl %ebx, %edi + orl %esi, %edi + movl 12(%esp), %esi + roll $10, %ebx + leal 1548603684(%ebp,%edi,),%ebp + movl $-1, %edi + addl %esi, %ebp + subl %eax, %edi + roll $11, %ebp + addl %edx, %ebp + /* 112 */ + movl 64(%esp), %esi + orl %ebp, %edi + addl %esi, %edx + xorl %ebx, %edi + movl $-1, %esi + roll $10, %eax + leal 1836072691(%edx,%edi,1),%edx + subl %ebp, %esi + roll $9, %edx + addl %ecx, %edx + /* 113 */ + movl 24(%esp), %edi + orl %edx, %esi + addl %edi, %ecx + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebp + leal 1836072691(%ecx,%esi,1),%ecx + subl %edx, %edi + roll $7, %ecx + addl %ebx, %ecx + /* 114 */ + movl 8(%esp), %esi + orl %ecx, %edi + addl %esi, %ebx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %edx + leal 1836072691(%ebx,%edi,1),%ebx + subl %ecx, %esi + roll $15, %ebx + addl %eax, %ebx + /* 115 */ + movl 16(%esp), %edi + orl %ebx, %esi + addl %edi, %eax + xorl %edx, %esi + movl $-1, %edi + roll $10, %ecx + leal 1836072691(%eax,%esi,1),%eax + subl %ebx, %edi + roll $11, %eax + addl %ebp, %eax + /* 116 */ + movl 32(%esp), %esi + orl %eax, %edi + addl %esi, %ebp + xorl %ecx, %edi + movl $-1, %esi + roll $10, %ebx + leal 1836072691(%ebp,%edi,1),%ebp + subl %eax, %esi + roll $8, %ebp + addl %edx, %ebp + /* 117 */ + movl 60(%esp), %edi + 
orl %ebp, %esi + addl %edi, %edx + xorl %ebx, %esi + movl $-1, %edi + roll $10, %eax + leal 1836072691(%edx,%esi,1),%edx + subl %ebp, %edi + roll $6, %edx + addl %ecx, %edx + /* 118 */ + movl 28(%esp), %esi + orl %edx, %edi + addl %esi, %ecx + xorl %eax, %edi + movl $-1, %esi + roll $10, %ebp + leal 1836072691(%ecx,%edi,1),%ecx + subl %edx, %esi + roll $6, %ecx + addl %ebx, %ecx + /* 119 */ + movl 40(%esp), %edi + orl %ecx, %esi + addl %edi, %ebx + xorl %ebp, %esi + movl $-1, %edi + roll $10, %edx + leal 1836072691(%ebx,%esi,1),%ebx + subl %ecx, %edi + roll $14, %ebx + addl %eax, %ebx + /* 120 */ + movl 48(%esp), %esi + orl %ebx, %edi + addl %esi, %eax + xorl %edx, %edi + movl $-1, %esi + roll $10, %ecx + leal 1836072691(%eax,%edi,1),%eax + subl %ebx, %esi + roll $12, %eax + addl %ebp, %eax + /* 121 */ + movl 36(%esp), %edi + orl %eax, %esi + addl %edi, %ebp + xorl %ecx, %esi + movl $-1, %edi + roll $10, %ebx + leal 1836072691(%ebp,%esi,1),%ebp + subl %eax, %edi + roll $13, %ebp + addl %edx, %ebp + /* 122 */ + movl 52(%esp), %esi + orl %ebp, %edi + addl %esi, %edx + xorl %ebx, %edi + movl $-1, %esi + roll $10, %eax + leal 1836072691(%edx,%edi,1),%edx + subl %ebp, %esi + roll $5, %edx + addl %ecx, %edx + /* 123 */ + movl 12(%esp), %edi + orl %edx, %esi + addl %edi, %ecx + xorl %eax, %esi + movl $-1, %edi + roll $10, %ebp + leal 1836072691(%ecx,%esi,1),%ecx + subl %edx, %edi + roll $14, %ecx + addl %ebx, %ecx + /* 124 */ + movl 44(%esp), %esi + orl %ecx, %edi + addl %esi, %ebx + xorl %ebp, %edi + movl $-1, %esi + roll $10, %edx + leal 1836072691(%ebx,%edi,1),%ebx + subl %ecx, %esi + roll $13, %ebx + addl %eax, %ebx + /* 125 */ + movl 4(%esp), %edi + orl %ebx, %esi + addl %edi, %eax + xorl %edx, %esi + movl $-1, %edi + roll $10, %ecx + leal 1836072691(%eax,%esi,1),%eax + subl %ebx, %edi + roll $13, %eax + addl %ebp, %eax + /* 126 */ + movl 20(%esp), %esi + orl %eax, %edi + addl %esi, %ebp + xorl %ecx, %edi + movl $-1, %esi + roll $10, %ebx + leal 1836072691(%ebp,%edi,1),%ebp + subl %eax, %esi + roll $7, %ebp + addl %edx, %ebp + /* 127 */ + movl 56(%esp), %edi + orl %ebp, %esi + addl %edi, %edx + xorl %ebx, %esi + movl 36(%esp), %edi + roll $10, %eax + leal 1836072691(%edx,%esi,1),%edx + movl $-1, %esi + roll $5, %edx + addl %ecx, %edx + /* 128 */ + addl %edi, %ecx + movl %ebp, %edi + subl %edx, %esi + andl %edx, %edi + andl %eax, %esi + orl %esi, %edi + movl 28(%esp), %esi + roll $10, %ebp + leal 2053994217(%ecx,%edi,1),%ecx + movl $-1, %edi + roll $15, %ecx + addl %ebx, %ecx + /* 129 */ + addl %esi, %ebx + movl %edx, %esi + subl %ecx, %edi + andl %ecx, %esi + andl %ebp, %edi + orl %edi, %esi + movl 20(%esp), %edi + roll $10, %edx + leal 2053994217(%ebx,%esi,1),%ebx + movl $-1, %esi + roll $5, %ebx + addl %eax, %ebx + /* 130 */ + addl %edi, %eax + movl %ecx, %edi + subl %ebx, %esi + andl %ebx, %edi + andl %edx, %esi + orl %esi, %edi + movl 8(%esp), %esi + roll $10, %ecx + leal 2053994217(%eax,%edi,1),%eax + movl $-1, %edi + roll $8, %eax + addl %ebp, %eax + /* 131 */ + addl %esi, %ebp + movl %ebx, %esi + subl %eax, %edi + andl %eax, %esi + andl %ecx, %edi + orl %edi, %esi + movl 16(%esp), %edi + roll $10, %ebx + leal 2053994217(%ebp,%esi,1),%ebp + movl $-1, %esi + roll $11, %ebp + addl %edx, %ebp + /* 132 */ + addl %edi, %edx + movl %eax, %edi + subl %ebp, %esi + andl %ebp, %edi + andl %ebx, %esi + orl %esi, %edi + movl 48(%esp), %esi + roll $10, %eax + leal 2053994217(%edx,%edi,1),%edx + movl $-1, %edi + roll $14, %edx + addl %ecx, %edx + /* 133 */ + addl %esi, %ecx + movl %ebp, %esi + subl 
%edx, %edi + andl %edx, %esi + andl %eax, %edi + orl %edi, %esi + movl 64(%esp), %edi + roll $10, %ebp + leal 2053994217(%ecx,%esi,1),%ecx + movl $-1, %esi + roll $14, %ecx + addl %ebx, %ecx + /* 134 */ + addl %edi, %ebx + movl %edx, %edi + subl %ecx, %esi + andl %ecx, %edi + andl %ebp, %esi + orl %esi, %edi + movl 4(%esp), %esi + roll $10, %edx + leal 2053994217(%ebx,%edi,1),%ebx + movl $-1, %edi + roll $6, %ebx + addl %eax, %ebx + /* 135 */ + addl %esi, %eax + movl %ecx, %esi + subl %ebx, %edi + andl %ebx, %esi + andl %edx, %edi + orl %edi, %esi + movl 24(%esp), %edi + roll $10, %ecx + leal 2053994217(%eax,%esi,1),%eax + movl $-1, %esi + roll $14, %eax + addl %ebp, %eax + /* 136 */ + addl %edi, %ebp + movl %ebx, %edi + subl %eax, %esi + andl %eax, %edi + andl %ecx, %esi + orl %esi, %edi + movl 52(%esp), %esi + roll $10, %ebx + leal 2053994217(%ebp,%edi,1),%ebp + movl $-1, %edi + roll $6, %ebp + addl %edx, %ebp + /* 137 */ + addl %esi, %edx + movl %eax, %esi + subl %ebp, %edi + andl %ebp, %esi + andl %ebx, %edi + orl %edi, %esi + movl 12(%esp), %edi + roll $10, %eax + leal 2053994217(%edx,%esi,1),%edx + movl $-1, %esi + roll $9, %edx + addl %ecx, %edx + /* 138 */ + addl %edi, %ecx + movl %ebp, %edi + subl %edx, %esi + andl %edx, %edi + andl %eax, %esi + orl %esi, %edi + movl 56(%esp), %esi + roll $10, %ebp + leal 2053994217(%ecx,%edi,1),%ecx + movl $-1, %edi + roll $12, %ecx + addl %ebx, %ecx + /* 139 */ + addl %esi, %ebx + movl %edx, %esi + subl %ecx, %edi + andl %ecx, %esi + andl %ebp, %edi + orl %edi, %esi + movl 40(%esp), %edi + roll $10, %edx + leal 2053994217(%ebx,%esi,1),%ebx + movl $-1, %esi + roll $9, %ebx + addl %eax, %ebx + /* 140 */ + addl %edi, %eax + movl %ecx, %edi + subl %ebx, %esi + andl %ebx, %edi + andl %edx, %esi + orl %esi, %edi + movl 32(%esp), %esi + roll $10, %ecx + leal 2053994217(%eax,%edi,1),%eax + movl $-1, %edi + roll $12, %eax + addl %ebp, %eax + /* 141 */ + addl %esi, %ebp + movl %ebx, %esi + subl %eax, %edi + andl %eax, %esi + andl %ecx, %edi + orl %edi, %esi + movl 44(%esp), %edi + roll $10, %ebx + leal 2053994217(%ebp,%esi,1),%ebp + movl $-1, %esi + roll $5, %ebp + addl %edx, %ebp + /* 142 */ + addl %edi, %edx + movl %eax, %edi + subl %ebp, %esi + andl %ebp, %edi + andl %ebx, %esi + orl %esi, %edi + movl 60(%esp), %esi + roll $10, %eax + leal 2053994217(%edx,%edi,1),%edx + movl $-1, %edi + roll $15, %edx + addl %ecx, %edx + /* 143 */ + addl %esi, %ecx + movl %ebp, %esi + subl %edx, %edi + andl %edx, %esi + andl %eax, %edi + orl %esi, %edi + movl %edx, %esi + roll $10, %ebp + leal 2053994217(%ecx,%edi,1),%ecx + xorl %ebp, %esi + roll $8, %ecx + addl %ebx, %ecx + /* 144 */ + movl 52(%esp), %edi + xorl %ecx, %esi + addl %edi, %ebx + roll $10, %edx + addl %esi, %ebx + movl %ecx, %esi + roll $8, %ebx + addl %eax, %ebx + /* 145 */ + xorl %edx, %esi + movl 64(%esp), %edi + xorl %ebx, %esi + addl %esi, %eax + movl %ebx, %esi + roll $10, %ecx + addl %edi, %eax + xorl %ecx, %esi + roll $5, %eax + addl %ebp, %eax + /* 146 */ + movl 44(%esp), %edi + xorl %eax, %esi + addl %edi, %ebp + roll $10, %ebx + addl %esi, %ebp + movl %eax, %esi + roll $12, %ebp + addl %edx, %ebp + /* 147 */ + xorl %ebx, %esi + movl 20(%esp), %edi + xorl %ebp, %esi + addl %esi, %edx + movl %ebp, %esi + roll $10, %eax + addl %edi, %edx + xorl %eax, %esi + roll $9, %edx + addl %ecx, %edx + /* 148 */ + movl 8(%esp), %edi + xorl %edx, %esi + addl %edi, %ecx + roll $10, %ebp + addl %esi, %ecx + movl %edx, %esi + roll $12, %ecx + addl %ebx, %ecx + /* 149 */ + xorl %ebp, %esi + movl 24(%esp), %edi + 
+	xorl %ecx, %esi
+	addl %esi, %ebx
+	movl %ecx, %esi
+	roll $10, %edx
+	addl %edi, %ebx
+	xorl %edx, %esi
+	roll $5, %ebx
+	addl %eax, %ebx
+	/* 150 */
+	movl 36(%esp), %edi
+	xorl %ebx, %esi
+	addl %edi, %eax
+	roll $10, %ecx
+	addl %esi, %eax
+	movl %ebx, %esi
+	roll $14, %eax
+	addl %ebp, %eax
+	/* 151 */
+	xorl %ecx, %esi
+	movl 32(%esp), %edi
+	xorl %eax, %esi
+	addl %esi, %ebp
+	movl %eax, %esi
+	roll $10, %ebx
+	addl %edi, %ebp
+	xorl %ebx, %esi
+	roll $6, %ebp
+	addl %edx, %ebp
+	/* 152 */
+	movl 28(%esp), %edi
+	xorl %ebp, %esi
+	addl %edi, %edx
+	roll $10, %eax
+	addl %esi, %edx
+	movl %ebp, %esi
+	roll $8, %edx
+	addl %ecx, %edx
+	/* 153 */
+	xorl %eax, %esi
+	movl 12(%esp), %edi
+	xorl %edx, %esi
+	addl %esi, %ecx
+	movl %edx, %esi
+	roll $10, %ebp
+	addl %edi, %ecx
+	xorl %ebp, %esi
+	roll $13, %ecx
+	addl %ebx, %ecx
+	/* 154 */
+	movl 56(%esp), %edi
+	xorl %ecx, %esi
+	addl %edi, %ebx
+	roll $10, %edx
+	addl %esi, %ebx
+	movl %ecx, %esi
+	roll $6, %ebx
+	addl %eax, %ebx
+	/* 155 */
+	xorl %edx, %esi
+	movl 60(%esp), %edi
+	xorl %ebx, %esi
+	addl %esi, %eax
+	movl %ebx, %esi
+	roll $10, %ecx
+	addl %edi, %eax
+	xorl %ecx, %esi
+	roll $5, %eax
+	addl %ebp, %eax
+	/* 156 */
+	movl 4(%esp), %edi
+	xorl %eax, %esi
+	addl %edi, %ebp
+	roll $10, %ebx
+	addl %esi, %ebp
+	movl %eax, %esi
+	roll $15, %ebp
+	addl %edx, %ebp
+	/* 157 */
+	xorl %ebx, %esi
+	movl 16(%esp), %edi
+	xorl %ebp, %esi
+	addl %esi, %edx
+	movl %ebp, %esi
+	roll $10, %eax
+	addl %edi, %edx
+	xorl %eax, %esi
+	roll $13, %edx
+	addl %ecx, %edx
+	/* 158 */
+	movl 40(%esp), %edi
+	xorl %edx, %esi
+	addl %edi, %ecx
+	roll $10, %ebp
+	addl %esi, %ecx
+	movl %edx, %esi
+	roll $11, %ecx
+	addl %ebx, %ecx
+	/* 159 */
+	xorl %ebp, %esi
+	movl 48(%esp), %edi
+	xorl %ecx, %esi
+	addl %esi, %ebx
+	roll $10, %edx
+	addl %edi, %ebx
+	movl 108(%esp), %edi
+	roll $11, %ebx
+	addl %eax, %ebx
+	movl 4(%edi), %esi
+	addl %esi, %edx
+	movl 76(%esp), %esi
+	addl %esi, %edx
+	movl 8(%edi), %esi
+	addl %esi, %ebp
+	movl 80(%esp), %esi
+	addl %esi, %ebp
+	movl 12(%edi), %esi
+	addl %esi, %eax
+	movl 84(%esp), %esi
+	addl %esi, %eax
+	movl 16(%edi), %esi
+	addl %esi, %ebx
+	movl 68(%esp), %esi
+	addl %esi, %ebx
+	movl (%edi), %esi
+	addl %esi, %ecx
+	movl 72(%esp), %esi
+	addl %esi, %ecx
+	movl %edx, (%edi)
+	movl %ebp, 4(%edi)
+	movl %eax, 8(%edi)
+	movl %ebx, 12(%edi)
+	movl %ecx, 16(%edi)
+	movl (%esp), %edi
+	movl 112(%esp), %esi
+	cmpl %esi, %edi
+	movl 108(%esp), %edi
+	jae .L000start
+	addl $88, %esp
+	popl %ebx
+	popl %ebp
+	popl %edi
+	popl %esi
+	ret
+.ripemd160_block_x86_end:
+	SIZE(ripemd160_block_x86,.ripemd160_block_x86_end-ripemd160_block_x86)
+.ident "desasm.pl"
+#endif /* not PIC */
diff --git a/lib/libmd/i386/sha.S b/lib/libmd/i386/sha.S
new file mode 100644
index 0000000..1e5201f
--- /dev/null
+++ b/lib/libmd/i386/sha.S
@@ -0,0 +1,1952 @@
+/* $FreeBSD$ */
+/* -*- Fundamental -*- Emacs' assembler mode hoses this file */
+#ifndef PIC
+/* Run the C pre-processor over this file with one of the following defined
+ * ELF - elf object files,
+ * OUT - a.out object files,
+ * BSDI - BSDI style a.out object files
+ * SOL - Solaris style elf
+ */
+
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#if defined(OUT) || defined(BSDI)
+#define sha1_block_x86 _sha1_block_x86
+
+#endif
+
+#ifdef OUT
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#define SIZE(a,b)
+#define TYPE(a,b)
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+/* Let the Assembler begin :-) */
+	/* Don't even think of reading this code */
+	/* It was automatically generated by sha1-586.pl */
+	/* Which is a perl program used to generate the x86 assembler for */
+	/* any of elf, a.out, BSDI, Win32, or Solaris */
+	/* eric <eay@cryptsoft.com> */
+
+	.file "sha1-586.s"
+	.version "01.01"
+gcc2_compiled.:
+.text
+	.p2align ALIGN
+.globl sha1_block_x86
+	TYPE(sha1_block_x86,@function)
+sha1_block_x86:
+	pushl %esi
+	pushl %ebp
+	movl 20(%esp), %eax
+	movl 16(%esp), %esi
+	addl %esi, %eax
+	movl 12(%esp), %ebp
+	pushl %ebx
+	subl $64, %eax
+	pushl %edi
+	movl 4(%ebp), %ebx
+	subl $72, %esp
+	movl 12(%ebp), %edx
+	movl 16(%ebp), %edi
+	movl 8(%ebp), %ecx
+	movl %eax, 68(%esp)
+	/* First we need to setup the X array */
+	movl (%esi), %eax
+.L000start:
+	/* First, load the words onto the stack in network byte order */
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, (%esp)
+	movl 4(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 4(%esp)
+	movl 8(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 8(%esp)
+	movl 12(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 12(%esp)
+	movl 16(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 16(%esp)
+	movl 20(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 20(%esp)
+	movl 24(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 24(%esp)
+	movl 28(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 28(%esp)
+	movl 32(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 32(%esp)
+	movl 36(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 36(%esp)
+	movl 40(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 40(%esp)
+	movl 44(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 44(%esp)
+	movl 48(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 48(%esp)
+	movl 52(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 52(%esp)
+	movl 56(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 56(%esp)
+	movl 60(%esi), %eax
+.byte 15
+.byte 200	/* bswapl %eax */
+	movl %eax, 60(%esp)
+	/* We now have the X array on the stack */
+	/* starting at sp-4 */
+	movl %esi, 64(%esp)
+
+	/* Start processing */
+	movl (%ebp), %eax
+	/* 00_15 0 */
+	movl %ecx, %esi
+	movl %eax, %ebp
+	xorl %edx, %esi
+	roll $5, %ebp
+	andl %ebx, %esi
+	addl %edi, %ebp
+.byte 209
+.byte 203	/* rorl $1 %ebx */
+	movl (%esp), %edi
+.byte 209
+.byte 203	/* rorl $1 %ebx */
+	xorl %edx, %esi
+	leal 1518500249(%ebp,%edi,1),%ebp
+	movl %ebx, %edi
+	addl %ebp, %esi
+	xorl %ecx, %edi
+	movl %esi, %ebp
+	andl %eax, %edi
+	roll $5, %ebp
+	addl %edx, %ebp
+	movl 4(%esp), %edx
+.byte 209
+.byte 200	/* rorl $1 %eax */
+	xorl %ecx, %edi
+.byte 209
+.byte 200	/* rorl $1 %eax */
+	leal 1518500249(%ebp,%edx,1),%ebp
+	addl %ebp, %edi
+	/* 00_15 2 */
+	movl %eax, %edx
+	movl %edi, %ebp
+	xorl %ebx, %edx
+	roll $5, %ebp
+	andl %esi, %edx
+	addl %ecx, %ebp
+.byte 209
+.byte 206	/* rorl $1 %esi */
+	movl 8(%esp), %ecx
+.byte 209
+.byte 206	/* rorl $1 %esi */
+	xorl %ebx, %edx
+	leal 1518500249(%ebp,%ecx,1),%ebp
+	movl %esi, %ecx
+	addl %ebp, %edx
+	xorl %eax, %ecx
+	movl %edx, %ebp
+	andl %edi, %ecx
+	roll $5, %ebp
+	addl %ebx, %ebp
+	movl
12(%esp), %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + xorl %eax, %ecx +.byte 209 +.byte 207 /* rorl $1 %edi */ + leal 1518500249(%ebp,%ebx,1),%ebp + addl %ebp, %ecx + /* 00_15 4 */ + movl %edi, %ebx + movl %ecx, %ebp + xorl %esi, %ebx + roll $5, %ebp + andl %edx, %ebx + addl %eax, %ebp +.byte 209 +.byte 202 /* rorl $1 %edx */ + movl 16(%esp), %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + xorl %esi, %ebx + leal 1518500249(%ebp,%eax,1),%ebp + movl %edx, %eax + addl %ebp, %ebx + xorl %edi, %eax + movl %ebx, %ebp + andl %ecx, %eax + roll $5, %ebp + addl %esi, %ebp + movl 20(%esp), %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + xorl %edi, %eax +.byte 209 +.byte 201 /* rorl $1 %ecx */ + leal 1518500249(%ebp,%esi,1),%ebp + addl %ebp, %eax + /* 00_15 6 */ + movl %ecx, %esi + movl %eax, %ebp + xorl %edx, %esi + roll $5, %ebp + andl %ebx, %esi + addl %edi, %ebp +.byte 209 +.byte 203 /* rorl $1 %ebx */ + movl 24(%esp), %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + xorl %edx, %esi + leal 1518500249(%ebp,%edi,1),%ebp + movl %ebx, %edi + addl %ebp, %esi + xorl %ecx, %edi + movl %esi, %ebp + andl %eax, %edi + roll $5, %ebp + addl %edx, %ebp + movl 28(%esp), %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + xorl %ecx, %edi +.byte 209 +.byte 200 /* rorl $1 %eax */ + leal 1518500249(%ebp,%edx,1),%ebp + addl %ebp, %edi + /* 00_15 8 */ + movl %eax, %edx + movl %edi, %ebp + xorl %ebx, %edx + roll $5, %ebp + andl %esi, %edx + addl %ecx, %ebp +.byte 209 +.byte 206 /* rorl $1 %esi */ + movl 32(%esp), %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + xorl %ebx, %edx + leal 1518500249(%ebp,%ecx,1),%ebp + movl %esi, %ecx + addl %ebp, %edx + xorl %eax, %ecx + movl %edx, %ebp + andl %edi, %ecx + roll $5, %ebp + addl %ebx, %ebp + movl 36(%esp), %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + xorl %eax, %ecx +.byte 209 +.byte 207 /* rorl $1 %edi */ + leal 1518500249(%ebp,%ebx,1),%ebp + addl %ebp, %ecx + /* 00_15 10 */ + movl %edi, %ebx + movl %ecx, %ebp + xorl %esi, %ebx + roll $5, %ebp + andl %edx, %ebx + addl %eax, %ebp +.byte 209 +.byte 202 /* rorl $1 %edx */ + movl 40(%esp), %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + xorl %esi, %ebx + leal 1518500249(%ebp,%eax,1),%ebp + movl %edx, %eax + addl %ebp, %ebx + xorl %edi, %eax + movl %ebx, %ebp + andl %ecx, %eax + roll $5, %ebp + addl %esi, %ebp + movl 44(%esp), %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + xorl %edi, %eax +.byte 209 +.byte 201 /* rorl $1 %ecx */ + leal 1518500249(%ebp,%esi,1),%ebp + addl %ebp, %eax + /* 00_15 12 */ + movl %ecx, %esi + movl %eax, %ebp + xorl %edx, %esi + roll $5, %ebp + andl %ebx, %esi + addl %edi, %ebp +.byte 209 +.byte 203 /* rorl $1 %ebx */ + movl 48(%esp), %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + xorl %edx, %esi + leal 1518500249(%ebp,%edi,1),%ebp + movl %ebx, %edi + addl %ebp, %esi + xorl %ecx, %edi + movl %esi, %ebp + andl %eax, %edi + roll $5, %ebp + addl %edx, %ebp + movl 52(%esp), %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + xorl %ecx, %edi +.byte 209 +.byte 200 /* rorl $1 %eax */ + leal 1518500249(%ebp,%edx,1),%ebp + addl %ebp, %edi + /* 00_15 14 */ + movl %eax, %edx + movl %edi, %ebp + xorl %ebx, %edx + roll $5, %ebp + andl %esi, %edx + addl %ecx, %ebp +.byte 209 +.byte 206 /* rorl $1 %esi */ + movl 56(%esp), %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + xorl %ebx, %edx + leal 1518500249(%ebp,%ecx,1),%ebp + movl %esi, %ecx + addl %ebp, %edx + xorl %eax, %ecx + movl %edx, %ebp + andl %edi, %ecx + roll $5, %ebp + addl %ebx, %ebp + movl 60(%esp), %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + xorl %eax, 
%ecx +.byte 209 +.byte 207 /* rorl $1 %edi */ + leal 1518500249(%ebp,%ebx,1),%ebp + addl %ebp, %ecx + /* 16_19 16 */ + nop + movl (%esp), %ebp + movl 8(%esp), %ebx + xorl %ebp, %ebx + movl 32(%esp), %ebp + xorl %ebp, %ebx + movl 52(%esp), %ebp + xorl %ebp, %ebx + movl %edi, %ebp +.byte 209 +.byte 195 /* roll $1 %ebx */ + xorl %esi, %ebp + movl %ebx, (%esp) + andl %edx, %ebp + leal 1518500249(%ebx,%eax,1),%ebx + xorl %esi, %ebp + movl %ecx, %eax + addl %ebp, %ebx + roll $5, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %eax, %ebx + movl 4(%esp), %eax + movl 12(%esp), %ebp + xorl %ebp, %eax + movl 36(%esp), %ebp + xorl %ebp, %eax + movl 56(%esp), %ebp +.byte 209 +.byte 202 /* rorl $1 %edx */ + xorl %ebp, %eax +.byte 209 +.byte 192 /* roll $1 %eax */ + movl %edx, %ebp + xorl %edi, %ebp + movl %eax, 4(%esp) + andl %ecx, %ebp + leal 1518500249(%eax,%esi,1),%eax + xorl %edi, %ebp + movl %ebx, %esi + roll $5, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %esi, %eax +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %ebp, %eax + /* 16_19 18 */ + movl 8(%esp), %ebp + movl 16(%esp), %esi + xorl %ebp, %esi + movl 40(%esp), %ebp + xorl %ebp, %esi + movl 60(%esp), %ebp + xorl %ebp, %esi + movl %ecx, %ebp +.byte 209 +.byte 198 /* roll $1 %esi */ + xorl %edx, %ebp + movl %esi, 8(%esp) + andl %ebx, %ebp + leal 1518500249(%esi,%edi,1),%esi + xorl %edx, %ebp + movl %eax, %edi + addl %ebp, %esi + roll $5, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %edi, %esi + movl 12(%esp), %edi + movl 20(%esp), %ebp + xorl %ebp, %edi + movl 44(%esp), %ebp + xorl %ebp, %edi + movl (%esp), %ebp +.byte 209 +.byte 203 /* rorl $1 %ebx */ + xorl %ebp, %edi +.byte 209 +.byte 199 /* roll $1 %edi */ + movl %ebx, %ebp + xorl %ecx, %ebp + movl %edi, 12(%esp) + andl %eax, %ebp + leal 1518500249(%edi,%edx,1),%edi + xorl %ecx, %ebp + movl %esi, %edx + roll $5, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %edx, %edi +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %ebp, %edi + /* 20_39 20 */ + movl 16(%esp), %edx + movl 24(%esp), %ebp + xorl %ebp, %edx + movl 48(%esp), %ebp + xorl %ebp, %edx + movl 4(%esp), %ebp + xorl %ebp, %edx + movl %esi, %ebp +.byte 209 +.byte 194 /* roll $1 %edx */ + xorl %eax, %ebp + movl %edx, 16(%esp) + xorl %ebx, %ebp + leal 1859775393(%edx,%ecx,1),%edx + movl %edi, %ecx + roll $5, %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + addl %ebp, %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + addl %ecx, %edx + /* 20_39 21 */ + movl 20(%esp), %ecx + movl 28(%esp), %ebp + xorl %ebp, %ecx + movl 52(%esp), %ebp + xorl %ebp, %ecx + movl 8(%esp), %ebp + xorl %ebp, %ecx + movl %edi, %ebp +.byte 209 +.byte 193 /* roll $1 %ecx */ + xorl %esi, %ebp + movl %ecx, 20(%esp) + xorl %eax, %ebp + leal 1859775393(%ecx,%ebx,1),%ecx + movl %edx, %ebx + roll $5, %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + addl %ebp, %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + addl %ebx, %ecx + /* 20_39 22 */ + movl 24(%esp), %ebx + movl 32(%esp), %ebp + xorl %ebp, %ebx + movl 56(%esp), %ebp + xorl %ebp, %ebx + movl 12(%esp), %ebp + xorl %ebp, %ebx + movl %edx, %ebp +.byte 209 +.byte 195 /* roll $1 %ebx */ + xorl %edi, %ebp + movl %ebx, 24(%esp) + xorl %esi, %ebp + leal 1859775393(%ebx,%eax,1),%ebx + movl %ecx, %eax + roll $5, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %ebp, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %eax, %ebx + /* 20_39 23 */ + movl 28(%esp), %eax + movl 36(%esp), %ebp + xorl %ebp, %eax + movl 60(%esp), %ebp + xorl %ebp, %eax + movl 16(%esp), %ebp + xorl %ebp, %eax + movl %ecx, 
%ebp +.byte 209 +.byte 192 /* roll $1 %eax */ + xorl %edx, %ebp + movl %eax, 28(%esp) + xorl %edi, %ebp + leal 1859775393(%eax,%esi,1),%eax + movl %ebx, %esi + roll $5, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %ebp, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %esi, %eax + /* 20_39 24 */ + movl 32(%esp), %esi + movl 40(%esp), %ebp + xorl %ebp, %esi + movl (%esp), %ebp + xorl %ebp, %esi + movl 20(%esp), %ebp + xorl %ebp, %esi + movl %ebx, %ebp +.byte 209 +.byte 198 /* roll $1 %esi */ + xorl %ecx, %ebp + movl %esi, 32(%esp) + xorl %edx, %ebp + leal 1859775393(%esi,%edi,1),%esi + movl %eax, %edi + roll $5, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %ebp, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %edi, %esi + /* 20_39 25 */ + movl 36(%esp), %edi + movl 44(%esp), %ebp + xorl %ebp, %edi + movl 4(%esp), %ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + xorl %ebp, %edi + movl %eax, %ebp +.byte 209 +.byte 199 /* roll $1 %edi */ + xorl %ebx, %ebp + movl %edi, 36(%esp) + xorl %ecx, %ebp + leal 1859775393(%edi,%edx,1),%edi + movl %esi, %edx + roll $5, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %ebp, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %edx, %edi + /* 20_39 26 */ + movl 40(%esp), %edx + movl 48(%esp), %ebp + xorl %ebp, %edx + movl 8(%esp), %ebp + xorl %ebp, %edx + movl 28(%esp), %ebp + xorl %ebp, %edx + movl %esi, %ebp +.byte 209 +.byte 194 /* roll $1 %edx */ + xorl %eax, %ebp + movl %edx, 40(%esp) + xorl %ebx, %ebp + leal 1859775393(%edx,%ecx,1),%edx + movl %edi, %ecx + roll $5, %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + addl %ebp, %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + addl %ecx, %edx + /* 20_39 27 */ + movl 44(%esp), %ecx + movl 52(%esp), %ebp + xorl %ebp, %ecx + movl 12(%esp), %ebp + xorl %ebp, %ecx + movl 32(%esp), %ebp + xorl %ebp, %ecx + movl %edi, %ebp +.byte 209 +.byte 193 /* roll $1 %ecx */ + xorl %esi, %ebp + movl %ecx, 44(%esp) + xorl %eax, %ebp + leal 1859775393(%ecx,%ebx,1),%ecx + movl %edx, %ebx + roll $5, %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + addl %ebp, %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + addl %ebx, %ecx + /* 20_39 28 */ + movl 48(%esp), %ebx + movl 56(%esp), %ebp + xorl %ebp, %ebx + movl 16(%esp), %ebp + xorl %ebp, %ebx + movl 36(%esp), %ebp + xorl %ebp, %ebx + movl %edx, %ebp +.byte 209 +.byte 195 /* roll $1 %ebx */ + xorl %edi, %ebp + movl %ebx, 48(%esp) + xorl %esi, %ebp + leal 1859775393(%ebx,%eax,1),%ebx + movl %ecx, %eax + roll $5, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %ebp, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %eax, %ebx + /* 20_39 29 */ + movl 52(%esp), %eax + movl 60(%esp), %ebp + xorl %ebp, %eax + movl 20(%esp), %ebp + xorl %ebp, %eax + movl 40(%esp), %ebp + xorl %ebp, %eax + movl %ecx, %ebp +.byte 209 +.byte 192 /* roll $1 %eax */ + xorl %edx, %ebp + movl %eax, 52(%esp) + xorl %edi, %ebp + leal 1859775393(%eax,%esi,1),%eax + movl %ebx, %esi + roll $5, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %ebp, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %esi, %eax + /* 20_39 30 */ + movl 56(%esp), %esi + movl (%esp), %ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + xorl %ebp, %esi + movl 44(%esp), %ebp + xorl %ebp, %esi + movl %ebx, %ebp +.byte 209 +.byte 198 /* roll $1 %esi */ + xorl %ecx, %ebp + movl %esi, 56(%esp) + xorl %edx, %ebp + leal 1859775393(%esi,%edi,1),%esi + movl %eax, %edi + roll $5, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %ebp, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %edi, %esi + /* 20_39 31 */ + 
movl 60(%esp), %edi + movl 4(%esp), %ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + xorl %ebp, %edi + movl 48(%esp), %ebp + xorl %ebp, %edi + movl %eax, %ebp +.byte 209 +.byte 199 /* roll $1 %edi */ + xorl %ebx, %ebp + movl %edi, 60(%esp) + xorl %ecx, %ebp + leal 1859775393(%edi,%edx,1),%edi + movl %esi, %edx + roll $5, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %ebp, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %edx, %edi + /* 20_39 32 */ + movl (%esp), %edx + movl 8(%esp), %ebp + xorl %ebp, %edx + movl 32(%esp), %ebp + xorl %ebp, %edx + movl 52(%esp), %ebp + xorl %ebp, %edx + movl %esi, %ebp +.byte 209 +.byte 194 /* roll $1 %edx */ + xorl %eax, %ebp + movl %edx, (%esp) + xorl %ebx, %ebp + leal 1859775393(%edx,%ecx,1),%edx + movl %edi, %ecx + roll $5, %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + addl %ebp, %ecx +.byte 209 +.byte 206 /* rorl $1 %esi */ + addl %ecx, %edx + /* 20_39 33 */ + movl 4(%esp), %ecx + movl 12(%esp), %ebp + xorl %ebp, %ecx + movl 36(%esp), %ebp + xorl %ebp, %ecx + movl 56(%esp), %ebp + xorl %ebp, %ecx + movl %edi, %ebp +.byte 209 +.byte 193 /* roll $1 %ecx */ + xorl %esi, %ebp + movl %ecx, 4(%esp) + xorl %eax, %ebp + leal 1859775393(%ecx,%ebx,1),%ecx + movl %edx, %ebx + roll $5, %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + addl %ebp, %ebx +.byte 209 +.byte 207 /* rorl $1 %edi */ + addl %ebx, %ecx + /* 20_39 34 */ + movl 8(%esp), %ebx + movl 16(%esp), %ebp + xorl %ebp, %ebx + movl 40(%esp), %ebp + xorl %ebp, %ebx + movl 60(%esp), %ebp + xorl %ebp, %ebx + movl %edx, %ebp +.byte 209 +.byte 195 /* roll $1 %ebx */ + xorl %edi, %ebp + movl %ebx, 8(%esp) + xorl %esi, %ebp + leal 1859775393(%ebx,%eax,1),%ebx + movl %ecx, %eax + roll $5, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %ebp, %eax +.byte 209 +.byte 202 /* rorl $1 %edx */ + addl %eax, %ebx + /* 20_39 35 */ + movl 12(%esp), %eax + movl 20(%esp), %ebp + xorl %ebp, %eax + movl 44(%esp), %ebp + xorl %ebp, %eax + movl (%esp), %ebp + xorl %ebp, %eax + movl %ecx, %ebp +.byte 209 +.byte 192 /* roll $1 %eax */ + xorl %edx, %ebp + movl %eax, 12(%esp) + xorl %edi, %ebp + leal 1859775393(%eax,%esi,1),%eax + movl %ebx, %esi + roll $5, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %ebp, %esi +.byte 209 +.byte 201 /* rorl $1 %ecx */ + addl %esi, %eax + /* 20_39 36 */ + movl 16(%esp), %esi + movl 24(%esp), %ebp + xorl %ebp, %esi + movl 48(%esp), %ebp + xorl %ebp, %esi + movl 4(%esp), %ebp + xorl %ebp, %esi + movl %ebx, %ebp +.byte 209 +.byte 198 /* roll $1 %esi */ + xorl %ecx, %ebp + movl %esi, 16(%esp) + xorl %edx, %ebp + leal 1859775393(%esi,%edi,1),%esi + movl %eax, %edi + roll $5, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %ebp, %edi +.byte 209 +.byte 203 /* rorl $1 %ebx */ + addl %edi, %esi + /* 20_39 37 */ + movl 20(%esp), %edi + movl 28(%esp), %ebp + xorl %ebp, %edi + movl 52(%esp), %ebp + xorl %ebp, %edi + movl 8(%esp), %ebp + xorl %ebp, %edi + movl %eax, %ebp +.byte 209 +.byte 199 /* roll $1 %edi */ + xorl %ebx, %ebp + movl %edi, 20(%esp) + xorl %ecx, %ebp + leal 1859775393(%edi,%edx,1),%edi + movl %esi, %edx + roll $5, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %ebp, %edx +.byte 209 +.byte 200 /* rorl $1 %eax */ + addl %edx, %edi + /* 20_39 38 */ + movl 24(%esp), %edx + movl 32(%esp), %ebp + xorl %ebp, %edx + movl 56(%esp), %ebp + xorl %ebp, %edx + movl 12(%esp), %ebp + xorl %ebp, %edx + movl %esi, %ebp +.byte 209 +.byte 194 /* roll $1 %edx */ + xorl %eax, %ebp + movl %edx, 24(%esp) + xorl %ebx, %ebp + leal 1859775393(%edx,%ecx,1),%edx + movl %edi, %ecx + roll 
+ /* 20_39 38 */
+ movl 24(%esp), %edx
+ movl 32(%esp), %ebp
+ xorl %ebp, %edx
+ movl 56(%esp), %ebp
+ xorl %ebp, %edx
+ movl 12(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 24(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 39 */
+ movl 28(%esp), %ecx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 28(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 40_59 40 */
+ movl 32(%esp), %ebx
+ movl 40(%esp), %ebp
+ xorl %ebp, %ebx
+ movl (%esp), %ebp
+ xorl %ebp, %ebx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 32(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 36(%esp), %eax
+ addl %ebp, %ebx
+ movl 44(%esp), %ebp
+ xorl %ebp, %eax
+ movl 4(%esp), %ebp
+ xorl %ebp, %eax
+ movl 24(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 36(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 41 */
+ /* 40_59 42 */
+ movl 40(%esp), %esi
+ movl 48(%esp), %ebp
+ xorl %ebp, %esi
+ movl 8(%esp), %ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ orl %ecx, %ebp
+ movl %esi, 40(%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ andl %ecx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 44(%esp), %edi
+ addl %ebp, %esi
+ movl 52(%esp), %ebp
+ xorl %ebp, %edi
+ movl 12(%esp), %ebp
+ xorl %ebp, %edi
+ movl 32(%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %eax, %ebp
+ movl %edi, 44(%esp)
+ orl %ebx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ecx, %ebp
+ andl %ebx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 40_59 43 */
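+ /* Annotation (not generator output): in the "40_59" rounds F is the
+  * majority function, computed here as
+  * maj(b,c,d) = ((b | c) & d) | (b & c), with K = 0x8F1BBCDC
+  * (2400959708).  The generator fuses these rounds in pairs, which is
+  * why every other round comment is empty. */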
+ /* 40_59 44 */
+ movl 48(%esp), %edx
+ movl 56(%esp), %ebp
+ xorl %ebp, %edx
+ movl 16(%esp), %ebp
+ xorl %ebp, %edx
+ movl 36(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ orl %eax, %ebp
+ movl %edx, 48(%esp)
+ andl %ebx, %ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ andl %eax, %ecx
+ orl %ecx, %ebp
+ movl %edi, %ecx
+ roll $5, %ecx
+ addl %ecx, %ebp
+ movl 52(%esp), %ecx
+ addl %ebp, %edx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 40(%esp), %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebp, %ecx
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ movl %edi, %ebp
+ movl %ecx, 52(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi, %ebx
+ andl %eax, %ebp
+ andl %esi, %ebx
+ orl %ebx, %ebp
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ebp
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ecx
+ /* 40_59 45 */
+ /* 40_59 46 */
+ movl 56(%esp), %ebx
+ movl (%esp), %ebp
+ xorl %ebp, %ebx
+ movl 24(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 56(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 60(%esp), %eax
+ addl %ebp, %ebx
+ movl 4(%esp), %ebp
+ xorl %ebp, %eax
+ movl 28(%esp), %ebp
+ xorl %ebp, %eax
+ movl 48(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 60(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 47 */
+ /* 40_59 48 */
+ movl (%esp), %esi
+ movl 8(%esp), %ebp
+ xorl %ebp, %esi
+ movl 32(%esp), %ebp
+ xorl %ebp, %esi
+ movl 52(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ orl %ecx, %ebp
+ movl %esi, (%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ andl %ecx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 4(%esp), %edi
+ addl %ebp, %esi
+ movl 12(%esp), %ebp
+ xorl %ebp, %edi
+ movl 36(%esp), %ebp
+ xorl %ebp, %edi
+ movl 56(%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %eax, %ebp
+ movl %edi, 4(%esp)
+ orl %ebx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ecx, %ebp
+ andl %ebx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 40_59 49 */
+ /* 40_59 50 */
+ movl 8(%esp), %edx
+ movl 16(%esp), %ebp
+ xorl %ebp, %edx
+ movl 40(%esp), %ebp
+ xorl %ebp, %edx
+ movl 60(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ orl %eax, %ebp
+ movl %edx, 8(%esp)
+ andl %ebx, %ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ andl %eax, %ecx
+ orl %ecx, %ebp
+ movl %edi, %ecx
+ roll $5, %ecx
+ addl %ecx, %ebp
+ movl 12(%esp), %ecx
+ addl %ebp, %edx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ecx
+ movl (%esp), %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebp, %ecx
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ movl %edi, %ebp
+ movl %ecx, 12(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi, %ebx
+ andl %eax, %ebp
+ andl %esi, %ebx
+ orl %ebx, %ebp
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ebp
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ecx
+ /* 40_59 51 */
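+ /* Annotation (not generator output): the five working variables stay
+  * in registers whose roles rotate from round to round, so the usual
+  * e=d, d=c, ... shuffle costs no moves; %ebp is the only scratch
+  * register and is reloaded for every schedule fetch. */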
+ /* 40_59 52 */
+ movl 16(%esp), %ebx
+ movl 24(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 16(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ addl %ebp, %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %eax
+ movl 52(%esp), %ebp
+ xorl %ebp, %eax
+ movl 8(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 20(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 53 */
+ /* 40_59 54 */
+ movl 24(%esp), %esi
+ movl 32(%esp), %ebp
+ xorl %ebp, %esi
+ movl 56(%esp), %ebp
+ xorl %ebp, %esi
+ movl 12(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ orl %ecx, %ebp
+ movl %esi, 24(%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ andl %ecx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 28(%esp), %edi
+ addl %ebp, %esi
+ movl 36(%esp), %ebp
+ xorl %ebp, %edi
+ movl 60(%esp), %ebp
+ xorl %ebp, %edi
+ movl 16(%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %eax, %ebp
+ movl %edi, 28(%esp)
+ orl %ebx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ecx, %ebp
+ andl %ebx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 40_59 55 */
+ /* 40_59 56 */
+ movl 32(%esp), %edx
+ movl 40(%esp), %ebp
+ xorl %ebp, %edx
+ movl (%esp), %ebp
+ xorl %ebp, %edx
+ movl 20(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ orl %eax, %ebp
+ movl %edx, 32(%esp)
+ andl %ebx, %ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ andl %eax, %ecx
+ orl %ecx, %ebp
+ movl %edi, %ecx
+ roll $5, %ecx
+ addl %ecx, %ebp
+ movl 36(%esp), %ecx
+ addl %ebp, %edx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 24(%esp), %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebp, %ecx
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ movl %edi, %ebp
+ movl %ecx, 36(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi, %ebx
+ andl %eax, %ebp
+ andl %esi, %ebx
+ orl %ebx, %ebp
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ebp
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ecx
+ /* 40_59 57 */
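+ /* Annotation (not generator output): each leal K(%x,%y,1),%x folds
+  * the two additions x = x + y + K into a single instruction, and
+  * leal leaves the flags untouched. */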
+ /* 40_59 58 */
+ movl 40(%esp), %ebx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 40(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 44(%esp), %eax
+ addl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 32(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 44(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 59 */
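+ /* Annotation (not generator output): rounds 60-79 reuse the parity
+  * function of rounds 20-39, hence the recycled "20_39" comments,
+  * but with K = 0xCA62C1D6 (3395469782). */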
+ /* 20_39 60 */
+ movl 48(%esp), %esi
+ movl 56(%esp), %ebp
+ xorl %ebp, %esi
+ movl 16(%esp), %ebp
+ xorl %ebp, %esi
+ movl 36(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 48(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 61 */
+ movl 52(%esp), %edi
+ movl 60(%esp), %ebp
+ xorl %ebp, %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 40(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 52(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 62 */
+ movl 56(%esp), %edx
+ movl (%esp), %ebp
+ xorl %ebp, %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 44(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 56(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 63 */
+ movl 60(%esp), %ecx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 60(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 64 */
+ movl (%esp), %ebx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, (%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 65 */
+ movl 4(%esp), %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 56(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 4(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 66 */
+ movl 8(%esp), %esi
+ movl 16(%esp), %ebp
+ xorl %ebp, %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl 60(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 8(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 67 */
+ movl 12(%esp), %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl (%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 12(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 68 */
+ movl 16(%esp), %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 4(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 16(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 69 */
+ movl 20(%esp), %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 20(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 70 */
+ movl 24(%esp), %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 24(%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 71 */
+ movl 28(%esp), %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 16(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 28(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 72 */
+ movl 32(%esp), %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 20(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 32(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 73 */
+ movl 36(%esp), %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 36(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 74 */
+ movl 40(%esp), %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 28(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 40(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 75 */
+ movl 44(%esp), %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 44(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 76 */
+ movl 48(%esp), %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 48(%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 77 */
+ movl 52(%esp), %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 40(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 52(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 78 */
+ movl 56(%esp), %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 44(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 56(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 79 */
+ movl 60(%esp), %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 48(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 60(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+ addl %ebp, %edx
+ movl 92(%esp), %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+.byte 209
+.byte 200 /* rorl $1 %eax */
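+ /* Annotation (not generator output): the epilogue below adds the
+  * five working values back into the hash state at (%ebp), advances
+  * the saved data pointer by 64 and loops to .L000start until that
+  * pointer passes the precomputed last-block address, then restores
+  * the callee-saved registers. */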
+ /* End processing */
+
+ movl 12(%ebp), %edx
+ addl %ebx, %edx
+ movl 4(%ebp), %ebx
+ addl %esi, %ebx
+ movl %eax, %esi
+ movl (%ebp), %eax
+ movl %edx, 12(%ebp)
+ addl %edi, %eax
+ movl 16(%ebp), %edi
+ addl %ecx, %edi
+ movl 8(%ebp), %ecx
+ addl %esi, %ecx
+ movl %eax, (%ebp)
+ movl 64(%esp), %esi
+ movl %ecx, 8(%ebp)
+ addl $64, %esi
+ movl 68(%esp), %eax
+ movl %edi, 16(%ebp)
+ cmpl %esi, %eax
+ movl %ebx, 4(%ebp)
+ jb .L001end
+ movl (%esi), %eax
+ jmp .L000start
+.L001end:
+ addl $72, %esp
+ popl %edi
+ popl %ebx
+ popl %ebp
+ popl %esi
+ ret
+.sha1_block_x86_end:
+ SIZE(sha1_block_x86,.sha1_block_x86_end-sha1_block_x86)
+.ident "desasm.pl"
+#endif