author     peter <peter@FreeBSD.org>  2002-05-03 00:14:39 +0000
committer  peter <peter@FreeBSD.org>  2002-05-03 00:14:39 +0000
commit     afb49cba0dff86548cb42f8d4eb03d29f5aa97d2 (patch)
tree       85400debd2bd2c66ca232d0c5e3e6551ecd8fe2d /secure
parent     4bd83f238785826252b148590755a82692a2dcfc (diff)
Pre-generate the optimized x86 crypto code and check it in rather than
depending on perl at build time.  Makefile.asm is a helper for after the
next import.

With my cvs@ hat on, the relatively small repo cost of this is acceptable,
especially given that we have other (much bigger) things like lib*.so.gz.uu
checked in under src/lib/compat/*.

Reviewed by:    kris (maintainer)
Diffstat (limited to 'secure')
-rw-r--r--  secure/lib/libcrypto/Makefile         |   58
-rw-r--r--  secure/lib/libcrypto/Makefile.asm     |   63
-rw-r--r--  secure/lib/libcrypto/Makefile.inc     |    5
-rw-r--r--  secure/lib/libcrypto/i386/bf-586.s    |  932
-rw-r--r--  secure/lib/libcrypto/i386/bf-686.s    |  902
-rw-r--r--  secure/lib/libcrypto/i386/bn-586.s    |  890
-rw-r--r--  secure/lib/libcrypto/i386/cast-586.s  |  971
-rw-r--r--  secure/lib/libcrypto/i386/co-586.s    | 1270
-rw-r--r--  secure/lib/libcrypto/i386/crypt586.s  |  933
-rw-r--r--  secure/lib/libcrypto/i386/des-586.s   | 3154
-rw-r--r--  secure/lib/libcrypto/i386/md5-586.s   |  689
-rw-r--r--  secure/lib/libcrypto/i386/rc4-586.s   |  316
-rw-r--r--  secure/lib/libcrypto/i386/rc5-586.s   |  584
-rw-r--r--  secure/lib/libcrypto/i386/rmd-586.s   | 1975
-rw-r--r--  secure/lib/libcrypto/i386/sha1-586.s  | 1960
15 files changed, 14661 insertions(+), 41 deletions(-)
diff --git a/secure/lib/libcrypto/Makefile b/secure/lib/libcrypto/Makefile
index f7bbd10..71166cb 100644
--- a/secure/lib/libcrypto/Makefile
+++ b/secure/lib/libcrypto/Makefile
@@ -16,13 +16,8 @@
${LCRYPTO_SRC}/stack ${LCRYPTO_SRC}/txt_db ${LCRYPTO_SRC}/x509 \
${LCRYPTO_SRC}/x509v3
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-.PATH: ${LCRYPTO_SRC}/rc4/asm ${LCRYPTO_SRC}/rc5/asm \
- ${LCRYPTO_SRC}/des/asm ${LCRYPTO_SRC}/cast/asm \
- ${LCRYPTO_SRC}/sha/asm ${LCRYPTO_SRC}/bn/asm \
- ${LCRYPTO_SRC}/bf/asm ${LCRYPTO_SRC}/md5/asm \
- ${LCRYPTO_SRC}/ripemd/asm
-PERLPATH= ${LCRYPTO_SRC}/des/asm:${LCRYPTO_SRC}/perlasm
+.if ${MACHINE_ARCH} == "i386"
+.PATH: ${.CURDIR}/i386
.endif
.if defined(MAKE_IDEA) && ${MAKE_IDEA} == YES
@@ -58,11 +53,11 @@ SRCS+= a_bitstr.c a_bmp.c a_bool.c a_bytes.c a_d2i_fp.c a_digest.c \
# blowfish
SRCS+= bf_cfb64.c bf_ecb.c bf_ofb64.c bf_skey.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
+.if ${MACHINE_ARCH} == "i386"
.if ${MACHINE_CPU:Mi686}
-SRCS+= bf-686.pl
+SRCS+= bf-686.s
.else
-SRCS+= bf-586.pl
+SRCS+= bf-586.s
.endif
.else
SRCS+= bf_enc.c
@@ -79,8 +74,8 @@ SRCS+= bn_add.c bn_blind.c bn_ctx.c bn_div.c bn_err.c \
bn_exp.c bn_exp2.c bn_gcd.c bn_lib.c bn_mont.c bn_mpi.c \
bn_mul.c bn_prime.c bn_print.c bn_rand.c bn_recp.c bn_shift.c \
bn_sqr.c bn_word.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= bn-586.pl co-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= bn-586.s co-586.s
.else
SRCS+= bn_asm.c
.endif
@@ -90,8 +85,8 @@ SRCS+= buf_err.c buffer.c
# cast
SRCS+= c_cfb64.c c_ecb.c c_ofb64.c c_skey.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= cast-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= cast-586.s
.else
SRCS+= c_enc.c
.endif
@@ -108,8 +103,8 @@ SRCS+= cbc_cksm.c cbc_enc.c cfb64ede.c cfb64enc.c cfb_enc.c \
fcrypt.c ofb64ede.c ofb64enc.c ofb_enc.c pcbc_enc.c \
qud_cksm.c rand_key.c read2pwd.c read_pwd.c rpc_enc.c \
set_key.c str2key.c xcbc_enc.c rnd_keys.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= des-586.pl crypt586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= des-586.s crypt586.s
.else
SRCS+= des_enc.c fcrypt_b.c
.endif
@@ -156,8 +151,8 @@ SRCS+= md4_dgst.c md4_one.c
# md5
SRCS+= md5_dgst.c md5_one.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= md5-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= md5-586.s
.endif
# mdc2
@@ -185,24 +180,24 @@ SRCS+= rc2_cbc.c rc2cfb64.c rc2_ecb.c rc2ofb64.c rc2_skey.c
# rc4
SRCS+= rc4_skey.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= rc4-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= rc4-586.s
.else
SRCS+= rc4_enc.c
.endif
# rc5
SRCS+= rc5cfb64.c rc5_ecb.c rc5ofb64.c rc5_skey.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= rc5-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= rc5-586.s
.else
SRCS+= rc5_enc.c
.endif
# ripemd
SRCS+= rmd_dgst.c rmd_one.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= rmd-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= rmd-586.s
.endif
# rsa
@@ -213,8 +208,8 @@ SRCS+= rsa_chk.c rsa_eay.c rsa_err.c rsa_gen.c rsa_lib.c rsa_none.c \
# sha
SRCS+= sha_dgst.c sha_one.c sha1_one.c sha1dgst.c
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-SRCS+= sha1-586.pl
+.if ${MACHINE_ARCH} == "i386"
+SRCS+= sha1-586.s
.endif
# stack
@@ -387,14 +382,3 @@ SYMLINKS+= lib${LIB}_p.a ${LIBDIR}/libdes_p.a
.endif
.include <bsd.lib.mk>
-
-.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
-CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.cmt/} ${SRCS:M*.pl:S/.pl$/.s/}
-.SUFFIXES: .pl .cmt
-
-.pl.cmt:
- perl -I${PERLPATH} ${.IMPSRC} elf ${CPUTYPE:Mi386:S/i//} > ${.TARGET}
-
-.cmt.s:
- tr -d "'" < ${.IMPSRC} > ${.TARGET}
-.endif
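
[Editor's note] For reference, the two suffix rules deleted above are what
used to run at build time.  Expanded by hand for one input (a sketch only:
the "386" argument assumes CPUTYPE=i386, since ${CPUTYPE:Mi386:S/i//}
strips the leading "i" and expands to nothing for other CPU types):

        # .pl.cmt: run the perl generator, asking for ELF output
        perl -I${LCRYPTO_SRC}/des/asm:${LCRYPTO_SRC}/perlasm bf-586.pl elf 386 > bf-586.cmt
        # .cmt.s: delete single quotes from the output; this is why the
        # generated headers below read "Dont" rather than "Don't"
        tr -d "'" < bf-586.cmt > bf-586.s

The same two rules survive in the new Makefile.asm, which follows.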
diff --git a/secure/lib/libcrypto/Makefile.asm b/secure/lib/libcrypto/Makefile.asm
new file mode 100644
index 0000000..f0b0ebc
--- /dev/null
+++ b/secure/lib/libcrypto/Makefile.asm
@@ -0,0 +1,63 @@
+# $FreeBSD$
+# Use this to help generate the asm *.s files after an import. It is not
+# perfect by any means, but does what is needed.
+# Do a 'make -f Makefile.asm all' and it will generate *.s. Move them
+# to the i386 subdir, and correct any exposed paths and $FreeBSD$ tags.
+
+.if ${MACHINE_ARCH} == "i386"
+
+.include "Makefile.inc"
+
+.PATH: ${LCRYPTO_SRC}/rc4/asm ${LCRYPTO_SRC}/rc5/asm \
+ ${LCRYPTO_SRC}/des/asm ${LCRYPTO_SRC}/cast/asm \
+ ${LCRYPTO_SRC}/sha/asm ${LCRYPTO_SRC}/bn/asm \
+ ${LCRYPTO_SRC}/bf/asm ${LCRYPTO_SRC}/md5/asm \
+ ${LCRYPTO_SRC}/ripemd/asm
+PERLPATH= ${LCRYPTO_SRC}/des/asm:${LCRYPTO_SRC}/perlasm
+
+SRCS=
+
+# blowfish
+SRCS+= bf-686.pl
+SRCS+= bf-586.pl
+
+# bn
+SRCS+= bn-586.pl co-586.pl
+
+# cast
+SRCS+= cast-586.pl
+
+# des
+SRCS+= des-586.pl crypt586.pl
+
+# md5
+SRCS+= md5-586.pl
+
+# rc4
+SRCS+= rc4-586.pl
+
+# rc5
+SRCS+= rc5-586.pl
+
+# ripemd
+SRCS+= rmd-586.pl
+
+# sha
+SRCS+= sha1-586.pl
+
+ASM= ${SRCS:S/.pl/.s/}
+
+all: ${ASM}
+
+CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.cmt/} ${SRCS:M*.pl:S/.pl$/.s/}
+.SUFFIXES: .pl .cmt
+
+.pl.cmt:
+ perl -I${PERLPATH} ${.IMPSRC} elf ${CPUTYPE:Mi386:S/i//} > ${.TARGET}
+
+.cmt.s:
+ tr -d "'" < ${.IMPSRC} > ${.TARGET}
+
+
+.include <bsd.prog.mk>
+.endif
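
[Editor's note] As the header comments note, the expected workflow after an
OpenSSL import is roughly the following (a sketch; exactly where the .s
files land depends on how bsd.prog.mk handles the object directory):

        cd secure/lib/libcrypto
        make -f Makefile.asm all        # regenerate bf-586.s, bn-586.s, ...
        mv *.s i386/                    # move into the committed location
        # then fix up any exposed local paths and $FreeBSD$ tags by hand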
diff --git a/secure/lib/libcrypto/Makefile.inc b/secure/lib/libcrypto/Makefile.inc
index 3e9fc28..ec09438 100644
--- a/secure/lib/libcrypto/Makefile.inc
+++ b/secure/lib/libcrypto/Makefile.inc
@@ -7,10 +7,7 @@ CFLAGS+= -DNO_IDEA
.endif
.if ${MACHINE_ARCH} == "i386"
-CFLAGS+= -DL_ENDIAN
-.if !defined(NOPERL) && !defined(NO_PERL)
-CFLAGS+= -DSHA1_ASM -DBN_ASM -DMD5_ASM -DRMD160_ASM
-.endif
+CFLAGS+= -DL_ENDIAN -DSHA1_ASM -DBN_ASM -DMD5_ASM -DRMD160_ASM
.elif ${MACHINE_ARCH} == "alpha"
# no ENDIAN stuff defined for alpha (64-bit)
.endif
diff --git a/secure/lib/libcrypto/i386/bf-586.s b/secure/lib/libcrypto/i386/bf-586.s
new file mode 100644
index 0000000..0965b1c
--- /dev/null
+++ b/secure/lib/libcrypto/i386/bf-586.s
@@ -0,0 +1,932 @@
+ # $FreeBSD$
+ # Dont even think of reading this code
+ # It was automatically generated by bf-586.pl
+ # Which is a perl program used to generate the x86 assember for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "bf-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl BF_encrypt
+ .type BF_encrypt,@function
+BF_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ xorl %eax, %eax
+ movl (%ebp), %ebx
+ xorl %ecx, %ecx
+ xorl %ebx, %edi
+
+ # Round 0
+ movl 4(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 1
+ movl 8(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 2
+ movl 12(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 3
+ movl 16(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 4
+ movl 20(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 5
+ movl 24(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 6
+ movl 28(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 7
+ movl 32(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 8
+ movl 36(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 9
+ movl 40(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 10
+ movl 44(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 11
+ movl 48(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 12
+ movl 52(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 13
+ movl 56(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 14
+ movl 60(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 15
+ movl 64(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ # Load parameter 0 (16) enc=1
+ movl 20(%esp), %eax
+ xorl %ebx, %edi
+ movl 68(%ebp), %edx
+ xorl %edx, %esi
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.BF_encrypt_end:
+ .size BF_encrypt,.BF_encrypt_end-BF_encrypt
+.ident "BF_encrypt"
+.text
+ .align 16
+.globl BF_decrypt
+ .type BF_decrypt,@function
+BF_decrypt:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ xorl %eax, %eax
+ movl 68(%ebp), %ebx
+ xorl %ecx, %ecx
+ xorl %ebx, %edi
+
+ # Round 16
+ movl 64(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 15
+ movl 60(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 14
+ movl 56(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 13
+ movl 52(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 12
+ movl 48(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 11
+ movl 44(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 10
+ movl 40(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 9
+ movl 36(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 8
+ movl 32(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 7
+ movl 28(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 6
+ movl 24(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 5
+ movl 20(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 4
+ movl 16(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 3
+ movl 12(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ # Round 2
+ movl 8(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ # Round 1
+ movl 4(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ # Load parameter 0 (1) enc=0
+ movl 20(%esp), %eax
+ xorl %ebx, %edi
+ movl (%ebp), %edx
+ xorl %edx, %esi
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.BF_decrypt_end:
+ .size BF_decrypt,.BF_decrypt_end-BF_decrypt
+.ident "BF_decrypt"
+.text
+ .align 16
+.globl BF_cbc_encrypt
+ .type BF_cbc_encrypt,@function
+BF_cbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ # getting iv ptr from parameter 4
+ movl 36(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ # getting encrypt flag from parameter 5
+ movl 56(%esp), %ecx
+ # get and push parameter 3
+ movl 48(%esp), %eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0, %ecx
+ jz .L000decrypt
+ andl $4294967288, %ebp
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ jz .L001encrypt_finish
+.L002encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L002encrypt_loop
+.L001encrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L003finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L004cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L005ej7:
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L006ej6:
+ movb 5(%esi), %dh
+.L007ej5:
+ movb 4(%esi), %dl
+.L008ej4:
+ movl (%esi), %ecx
+ jmp .L009ejend
+.L010ej3:
+ movb 2(%esi), %ch
+ sall $8, %ecx
+.L011ej2:
+ movb 1(%esi), %ch
+.L012ej1:
+ movb (%esi), %cl
+.L009ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L003finish
+.align 16
+.L000decrypt:
+ andl $4294967288, %ebp
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ jz .L013decrypt_finish
+.L014decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L014decrypt_loop
+.L013decrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L003finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.L015dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L016dj6:
+ movb %dh, 5(%edi)
+.L017dj5:
+ movb %dl, 4(%edi)
+.L018dj4:
+ movl %ecx, (%edi)
+ jmp .L019djend
+.L020dj3:
+ rorl $16, %ecx
+ movb %cl, 2(%edi)
+ sall $16, %ecx
+.L021dj2:
+ movb %ch, 1(%esi)
+.L022dj1:
+ movb %cl, (%esi)
+.L019djend:
+ jmp .L003finish
+.align 16
+.L003finish:
+ movl 60(%esp), %ecx
+ addl $24, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L004cbc_enc_jmp_table:
+ .long 0
+ .long .L012ej1
+ .long .L011ej2
+ .long .L010ej3
+ .long .L008ej4
+ .long .L007ej5
+ .long .L006ej6
+ .long .L005ej7
+.align 16
+.L023cbc_dec_jmp_table:
+ .long 0
+ .long .L022dj1
+ .long .L021dj2
+ .long .L020dj3
+ .long .L018dj4
+ .long .L017dj5
+ .long .L016dj6
+ .long .L015dj7
+.L_BF_cbc_encrypt_end:
+ .size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt
+.ident "desasm.pl"
diff --git a/secure/lib/libcrypto/i386/bf-686.s b/secure/lib/libcrypto/i386/bf-686.s
new file mode 100644
index 0000000..bb3b9c7
--- /dev/null
+++ b/secure/lib/libcrypto/i386/bf-686.s
@@ -0,0 +1,902 @@
+ # $FreeBSD$
+ # Dont even think of reading this code
+ # It was automatically generated by bf-686.pl
+ # Which is a perl program used to generate the x86 assember for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "bf-686.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl BF_encrypt
+ .type BF_encrypt,@function
+BF_encrypt:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ # Load the 2 words
+ movl 20(%esp), %eax
+ movl (%eax), %ecx
+ movl 4(%eax), %edx
+
+ # P pointer, s and enc flag
+ movl 24(%esp), %edi
+ xorl %eax, %eax
+ xorl %ebx, %ebx
+ xorl (%edi), %ecx
+
+ # Round 0
+ rorl $16, %ecx
+ movl 4(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 1
+ rorl $16, %edx
+ movl 8(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 2
+ rorl $16, %ecx
+ movl 12(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 3
+ rorl $16, %edx
+ movl 16(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 4
+ rorl $16, %ecx
+ movl 20(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 5
+ rorl $16, %edx
+ movl 24(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 6
+ rorl $16, %ecx
+ movl 28(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 7
+ rorl $16, %edx
+ movl 32(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 8
+ rorl $16, %ecx
+ movl 36(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 9
+ rorl $16, %edx
+ movl 40(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 10
+ rorl $16, %ecx
+ movl 44(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 11
+ rorl $16, %edx
+ movl 48(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 12
+ rorl $16, %ecx
+ movl 52(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 13
+ rorl $16, %edx
+ movl 56(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 14
+ rorl $16, %ecx
+ movl 60(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 15
+ rorl $16, %edx
+ movl 64(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+ xorl 68(%edi), %edx
+ movl 20(%esp), %eax
+ movl %edx, (%eax)
+ movl %ecx, 4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.L_BF_encrypt_end:
+ .size BF_encrypt,.L_BF_encrypt_end-BF_encrypt
+.ident "desasm.pl"
+.text
+ .align 16
+.globl BF_decrypt
+ .type BF_decrypt,@function
+BF_decrypt:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ # Load the 2 words
+ movl 20(%esp), %eax
+ movl (%eax), %ecx
+ movl 4(%eax), %edx
+
+ # P pointer, s and enc flag
+ movl 24(%esp), %edi
+ xorl %eax, %eax
+ xorl %ebx, %ebx
+ xorl 68(%edi), %ecx
+
+ # Round 16
+ rorl $16, %ecx
+ movl 64(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 15
+ rorl $16, %edx
+ movl 60(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 14
+ rorl $16, %ecx
+ movl 56(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 13
+ rorl $16, %edx
+ movl 52(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 12
+ rorl $16, %ecx
+ movl 48(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 11
+ rorl $16, %edx
+ movl 44(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 10
+ rorl $16, %ecx
+ movl 40(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 9
+ rorl $16, %edx
+ movl 36(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 8
+ rorl $16, %ecx
+ movl 32(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 7
+ rorl $16, %edx
+ movl 28(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 6
+ rorl $16, %ecx
+ movl 24(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 5
+ rorl $16, %edx
+ movl 20(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 4
+ rorl $16, %ecx
+ movl 16(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 3
+ rorl $16, %edx
+ movl 12(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ # Round 2
+ rorl $16, %ecx
+ movl 8(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ # Round 1
+ rorl $16, %edx
+ movl 4(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+ xorl (%edi), %edx
+ movl 20(%esp), %eax
+ movl %edx, (%eax)
+ movl %ecx, 4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.L_BF_decrypt_end:
+ .size BF_decrypt,.L_BF_decrypt_end-BF_decrypt
+.ident "desasm.pl"
+.text
+ .align 16
+.globl BF_cbc_encrypt
+ .type BF_cbc_encrypt,@function
+BF_cbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ # getting iv ptr from parameter 4
+ movl 36(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ # getting encrypt flag from parameter 5
+ movl 56(%esp), %ecx
+ # get and push parameter 3
+ movl 48(%esp), %eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0, %ecx
+ jz .L000decrypt
+ andl $4294967288, %ebp
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ jz .L001encrypt_finish
+.L002encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L002encrypt_loop
+.L001encrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L003finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L004cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L005ej7:
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L006ej6:
+ movb 5(%esi), %dh
+.L007ej5:
+ movb 4(%esi), %dl
+.L008ej4:
+ movl (%esi), %ecx
+ jmp .L009ejend
+.L010ej3:
+ movb 2(%esi), %ch
+ sall $8, %ecx
+.L011ej2:
+ movb 1(%esi), %ch
+.L012ej1:
+ movb (%esi), %cl
+.L009ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L003finish
+.align 16
+.L000decrypt:
+ andl $4294967288, %ebp
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ jz .L013decrypt_finish
+.L014decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L014decrypt_loop
+.L013decrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L003finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call BF_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.L015dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L016dj6:
+ movb %dh, 5(%edi)
+.L017dj5:
+ movb %dl, 4(%edi)
+.L018dj4:
+ movl %ecx, (%edi)
+ jmp .L019djend
+.L020dj3:
+ rorl $16, %ecx
+ movb %cl, 2(%edi)
+ sall $16, %ecx
+.L021dj2:
+ movb %ch, 1(%esi)
+.L022dj1:
+ movb %cl, (%esi)
+.L019djend:
+ jmp .L003finish
+.align 16
+.L003finish:
+ movl 60(%esp), %ecx
+ addl $24, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L004cbc_enc_jmp_table:
+ .long 0
+ .long .L012ej1
+ .long .L011ej2
+ .long .L010ej3
+ .long .L008ej4
+ .long .L007ej5
+ .long .L006ej6
+ .long .L005ej7
+.align 16
+.L023cbc_dec_jmp_table:
+ .long 0
+ .long .L022dj1
+ .long .L021dj2
+ .long .L020dj3
+ .long .L018dj4
+ .long .L017dj5
+ .long .L016dj6
+ .long .L015dj7
+.L_BF_cbc_encrypt_end:
+ .size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt
+.ident "desasm.pl"
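
[Editor's note] bf-686.s computes the same 16-round Blowfish function as
bf-586.s; the two files differ mainly in which CPU generation the
scheduling targets (hence the ${MACHINE_CPU:Mi686} test in the Makefile)
and in how each round extracts the S-box index bytes.  Side by side,
verbatim from the two round bodies:

        # bf-586.s: copy the word, then shift and mask bytes out
        movl %edi, %ebx
        shrl $16, %ebx
        movb %bh, %al
        andl $255, %ebx

        # bf-686.s: rotate the word in place and read %ch/%cl directly
        rorl $16, %ecx
        movb %ch, %al
        movb %cl, %bl
        rorl $16, %ecx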
diff --git a/secure/lib/libcrypto/i386/bn-586.s b/secure/lib/libcrypto/i386/bn-586.s
new file mode 100644
index 0000000..3ea4a8a
--- /dev/null
+++ b/secure/lib/libcrypto/i386/bn-586.s
@@ -0,0 +1,890 @@
+ # $FreeBSD$
+ # Dont even think of reading this code
+ # It was automatically generated by bn-586.pl
+ # Which is a perl program used to generate the x86 assember for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "bn-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl bn_mul_add_words
+ .type bn_mul_add_words,@function
+bn_mul_add_words:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ xorl %esi, %esi
+ movl 20(%esp), %edi
+ movl 28(%esp), %ecx
+ movl 24(%esp), %ebx
+ andl $4294967288, %ecx
+ movl 32(%esp), %ebp
+ pushl %ecx
+ jz .L000maw_finish
+.L001maw_loop:
+ movl %ecx, (%esp)
+ # Round 0
+ movl (%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl (%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, (%edi)
+ movl %edx, %esi
+ # Round 4
+ movl 4(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 4(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 4(%edi)
+ movl %edx, %esi
+ # Round 8
+ movl 8(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 8(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 8(%edi)
+ movl %edx, %esi
+ # Round 12
+ movl 12(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 12(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 12(%edi)
+ movl %edx, %esi
+ # Round 16
+ movl 16(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 16(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 16(%edi)
+ movl %edx, %esi
+ # Round 20
+ movl 20(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 20(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 20(%edi)
+ movl %edx, %esi
+ # Round 24
+ movl 24(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 24(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 24(%edi)
+ movl %edx, %esi
+ # Round 28
+ movl 28(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 28(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 28(%edi)
+ movl %edx, %esi
+
+ movl (%esp), %ecx
+ addl $32, %ebx
+ addl $32, %edi
+ subl $8, %ecx
+ jnz .L001maw_loop
+.L000maw_finish:
+ movl 32(%esp), %ecx
+ andl $7, %ecx
+ jnz .L002maw_finish2
+ jmp .L003maw_end
+.align 16
+.L002maw_finish2:
+ # Tail Round 0
+ movl (%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl (%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ decl %ecx
+ movl %eax, (%edi)
+ movl %edx, %esi
+ jz .L003maw_end
+ # Tail Round 1
+ movl 4(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 4(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ decl %ecx
+ movl %eax, 4(%edi)
+ movl %edx, %esi
+ jz .L003maw_end
+ # Tail Round 2
+ movl 8(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 8(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ decl %ecx
+ movl %eax, 8(%edi)
+ movl %edx, %esi
+ jz .L003maw_end
+ # Tail Round 3
+ movl 12(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 12(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ decl %ecx
+ movl %eax, 12(%edi)
+ movl %edx, %esi
+ jz .L003maw_end
+ # Tail Round 4
+ movl 16(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 16(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ decl %ecx
+ movl %eax, 16(%edi)
+ movl %edx, %esi
+ jz .L003maw_end
+ # Tail Round 5
+ movl 20(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 20(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ decl %ecx
+ movl %eax, 20(%edi)
+ movl %edx, %esi
+ jz .L003maw_end
+ # Tail Round 6
+ movl 24(%ebx), %eax
+ mull %ebp
+ addl %esi, %eax
+ movl 24(%edi), %esi
+ adcl $0, %edx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 24(%edi)
+ movl %edx, %esi
+.L003maw_end:
+ movl %esi, %eax
+ popl %ecx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.bn_mul_add_words_end:
+ .size bn_mul_add_words,.bn_mul_add_words_end-bn_mul_add_words
+.ident "bn_mul_add_words"
+.text
+ .align 16
+.globl bn_mul_words
+ .type bn_mul_words,@function
+bn_mul_words:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ xorl %esi, %esi
+ movl 20(%esp), %edi
+ movl 24(%esp), %ebx
+ movl 28(%esp), %ebp
+ movl 32(%esp), %ecx
+ andl $4294967288, %ebp
+ jz .L004mw_finish
+.L005mw_loop:
+ # Round 0
+ movl (%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, (%edi)
+ movl %edx, %esi
+ # Round 4
+ movl 4(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 4(%edi)
+ movl %edx, %esi
+ # Round 8
+ movl 8(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 8(%edi)
+ movl %edx, %esi
+ # Round 12
+ movl 12(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 12(%edi)
+ movl %edx, %esi
+ # Round 16
+ movl 16(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 16(%edi)
+ movl %edx, %esi
+ # Round 20
+ movl 20(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 20(%edi)
+ movl %edx, %esi
+ # Round 24
+ movl 24(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 24(%edi)
+ movl %edx, %esi
+ # Round 28
+ movl 28(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 28(%edi)
+ movl %edx, %esi
+
+ addl $32, %ebx
+ addl $32, %edi
+ subl $8, %ebp
+ jz .L004mw_finish
+ jmp .L005mw_loop
+.L004mw_finish:
+ movl 28(%esp), %ebp
+ andl $7, %ebp
+ jnz .L006mw_finish2
+ jmp .L007mw_end
+.align 16
+.L006mw_finish2:
+ # Tail Round 0
+ movl (%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, (%edi)
+ movl %edx, %esi
+ decl %ebp
+ jz .L007mw_end
+ # Tail Round 1
+ movl 4(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 4(%edi)
+ movl %edx, %esi
+ decl %ebp
+ jz .L007mw_end
+ # Tail Round 2
+ movl 8(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 8(%edi)
+ movl %edx, %esi
+ decl %ebp
+ jz .L007mw_end
+ # Tail Round 3
+ movl 12(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 12(%edi)
+ movl %edx, %esi
+ decl %ebp
+ jz .L007mw_end
+ # Tail Round 4
+ movl 16(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 16(%edi)
+ movl %edx, %esi
+ decl %ebp
+ jz .L007mw_end
+ # Tail Round 5
+ movl 20(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 20(%edi)
+ movl %edx, %esi
+ decl %ebp
+ jz .L007mw_end
+ # Tail Round 6
+ movl 24(%ebx), %eax
+ mull %ecx
+ addl %esi, %eax
+ adcl $0, %edx
+ movl %eax, 24(%edi)
+ movl %edx, %esi
+.L007mw_end:
+ movl %esi, %eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.bn_mul_words_end:
+ .size bn_mul_words,.bn_mul_words_end-bn_mul_words
+.ident "bn_mul_words"
+.text
+ .align 16
+.globl bn_sqr_words
+ .type bn_sqr_words,@function
+bn_sqr_words:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ movl 20(%esp), %esi
+ movl 24(%esp), %edi
+ movl 28(%esp), %ebx
+ andl $4294967288, %ebx
+ jz .L008sw_finish
+.L009sw_loop:
+ # Round 0
+ movl (%edi), %eax
+ mull %eax
+ movl %eax, (%esi)
+ movl %edx, 4(%esi)
+ # Round 4
+ movl 4(%edi), %eax
+ mull %eax
+ movl %eax, 8(%esi)
+ movl %edx, 12(%esi)
+ # Round 8
+ movl 8(%edi), %eax
+ mull %eax
+ movl %eax, 16(%esi)
+ movl %edx, 20(%esi)
+ # Round 12
+ movl 12(%edi), %eax
+ mull %eax
+ movl %eax, 24(%esi)
+ movl %edx, 28(%esi)
+ # Round 16
+ movl 16(%edi), %eax
+ mull %eax
+ movl %eax, 32(%esi)
+ movl %edx, 36(%esi)
+ # Round 20
+ movl 20(%edi), %eax
+ mull %eax
+ movl %eax, 40(%esi)
+ movl %edx, 44(%esi)
+ # Round 24
+ movl 24(%edi), %eax
+ mull %eax
+ movl %eax, 48(%esi)
+ movl %edx, 52(%esi)
+ # Round 28
+ movl 28(%edi), %eax
+ mull %eax
+ movl %eax, 56(%esi)
+ movl %edx, 60(%esi)
+
+ addl $32, %edi
+ addl $64, %esi
+ subl $8, %ebx
+ jnz .L009sw_loop
+.L008sw_finish:
+ movl 28(%esp), %ebx
+ andl $7, %ebx
+ jz .L010sw_end
+ # Tail Round 0
+ movl (%edi), %eax
+ mull %eax
+ movl %eax, (%esi)
+ decl %ebx
+ movl %edx, 4(%esi)
+ jz .L010sw_end
+ # Tail Round 1
+ movl 4(%edi), %eax
+ mull %eax
+ movl %eax, 8(%esi)
+ decl %ebx
+ movl %edx, 12(%esi)
+ jz .L010sw_end
+ # Tail Round 2
+ movl 8(%edi), %eax
+ mull %eax
+ movl %eax, 16(%esi)
+ decl %ebx
+ movl %edx, 20(%esi)
+ jz .L010sw_end
+ # Tail Round 3
+ movl 12(%edi), %eax
+ mull %eax
+ movl %eax, 24(%esi)
+ decl %ebx
+ movl %edx, 28(%esi)
+ jz .L010sw_end
+ # Tail Round 4
+ movl 16(%edi), %eax
+ mull %eax
+ movl %eax, 32(%esi)
+ decl %ebx
+ movl %edx, 36(%esi)
+ jz .L010sw_end
+ # Tail Round 5
+ movl 20(%edi), %eax
+ mull %eax
+ movl %eax, 40(%esi)
+ decl %ebx
+ movl %edx, 44(%esi)
+ jz .L010sw_end
+ # Tail Round 6
+ movl 24(%edi), %eax
+ mull %eax
+ movl %eax, 48(%esi)
+ movl %edx, 52(%esi)
+.L010sw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.bn_sqr_words_end:
+ .size bn_sqr_words,.bn_sqr_words_end-bn_sqr_words
+.ident "bn_sqr_words"
+.text
+ .align 16
+.globl bn_div_words
+ .type bn_div_words,@function
+bn_div_words:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp), %edx
+ movl 24(%esp), %eax
+ movl 28(%esp), %ebx
+ divl %ebx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.bn_div_words_end:
+ .size bn_div_words,.bn_div_words_end-bn_div_words
+.ident "bn_div_words"
+.text
+ .align 16
+.globl bn_add_words
+ .type bn_add_words,@function
+bn_add_words:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ movl 20(%esp), %ebx
+ movl 24(%esp), %esi
+ movl 28(%esp), %edi
+ movl 32(%esp), %ebp
+ xorl %eax, %eax
+ andl $4294967288, %ebp
+ jz .L011aw_finish
+.L012aw_loop:
+ # Round 0
+ movl (%esi), %ecx
+ movl (%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, (%ebx)
+ # Round 1
+ movl 4(%esi), %ecx
+ movl 4(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 4(%ebx)
+ # Round 2
+ movl 8(%esi), %ecx
+ movl 8(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 8(%ebx)
+ # Round 3
+ movl 12(%esi), %ecx
+ movl 12(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 12(%ebx)
+ # Round 4
+ movl 16(%esi), %ecx
+ movl 16(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 16(%ebx)
+ # Round 5
+ movl 20(%esi), %ecx
+ movl 20(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 20(%ebx)
+ # Round 6
+ movl 24(%esi), %ecx
+ movl 24(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 24(%ebx)
+ # Round 7
+ movl 28(%esi), %ecx
+ movl 28(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 28(%ebx)
+
+ addl $32, %esi
+ addl $32, %edi
+ addl $32, %ebx
+ subl $8, %ebp
+ jnz .L012aw_loop
+.L011aw_finish:
+ movl 32(%esp), %ebp
+ andl $7, %ebp
+ jz .L013aw_end
+ # Tail Round 0
+ movl (%esi), %ecx
+ movl (%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, (%ebx)
+ jz .L013aw_end
+ # Tail Round 1
+ movl 4(%esi), %ecx
+ movl 4(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 4(%ebx)
+ jz .L013aw_end
+ # Tail Round 2
+ movl 8(%esi), %ecx
+ movl 8(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 8(%ebx)
+ jz .L013aw_end
+ # Tail Round 3
+ movl 12(%esi), %ecx
+ movl 12(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 12(%ebx)
+ jz .L013aw_end
+ # Tail Round 4
+ movl 16(%esi), %ecx
+ movl 16(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 16(%ebx)
+ jz .L013aw_end
+ # Tail Round 5
+ movl 20(%esi), %ecx
+ movl 20(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 20(%ebx)
+ jz .L013aw_end
+ # Tail Round 6
+ movl 24(%esi), %ecx
+ movl 24(%edi), %edx
+ addl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ addl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 24(%ebx)
+.L013aw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.bn_add_words_end:
+ .size bn_add_words,.bn_add_words_end-bn_add_words
+.ident "bn_add_words"
+.text
+ .align 16
+.globl bn_sub_words
+ .type bn_sub_words,@function
+bn_sub_words:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ movl 20(%esp), %ebx
+ movl 24(%esp), %esi
+ movl 28(%esp), %edi
+ movl 32(%esp), %ebp
+ xorl %eax, %eax
+ andl $4294967288, %ebp
+ jz .L014aw_finish
+.L015aw_loop:
+ # Round 0
+ movl (%esi), %ecx
+ movl (%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, (%ebx)
+ # Round 1
+ movl 4(%esi), %ecx
+ movl 4(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 4(%ebx)
+ # Round 2
+ movl 8(%esi), %ecx
+ movl 8(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 8(%ebx)
+ # Round 3
+ movl 12(%esi), %ecx
+ movl 12(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 12(%ebx)
+ # Round 4
+ movl 16(%esi), %ecx
+ movl 16(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 16(%ebx)
+ # Round 5
+ movl 20(%esi), %ecx
+ movl 20(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 20(%ebx)
+ # Round 6
+ movl 24(%esi), %ecx
+ movl 24(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 24(%ebx)
+ # Round 7
+ movl 28(%esi), %ecx
+ movl 28(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 28(%ebx)
+
+ addl $32, %esi
+ addl $32, %edi
+ addl $32, %ebx
+ subl $8, %ebp
+ jnz .L015aw_loop
+.L014aw_finish:
+ movl 32(%esp), %ebp
+ andl $7, %ebp
+ jz .L016aw_end
+ # Tail Round 0
+ movl (%esi), %ecx
+ movl (%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, (%ebx)
+ jz .L016aw_end
+ # Tail Round 1
+ movl 4(%esi), %ecx
+ movl 4(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 4(%ebx)
+ jz .L016aw_end
+ # Tail Round 2
+ movl 8(%esi), %ecx
+ movl 8(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 8(%ebx)
+ jz .L016aw_end
+ # Tail Round 3
+ movl 12(%esi), %ecx
+ movl 12(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 12(%ebx)
+ jz .L016aw_end
+ # Tail Round 4
+ movl 16(%esi), %ecx
+ movl 16(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 16(%ebx)
+ jz .L016aw_end
+ # Tail Round 5
+ movl 20(%esi), %ecx
+ movl 20(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ decl %ebp
+ movl %ecx, 20(%ebx)
+ jz .L016aw_end
+ # Tail Round 6
+ movl 24(%esi), %ecx
+ movl 24(%edi), %edx
+ subl %eax, %ecx
+ movl $0, %eax
+ adcl %eax, %eax
+ subl %edx, %ecx
+ adcl $0, %eax
+ movl %ecx, 24(%ebx)
+.L016aw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.bn_sub_words_end:
+ .size bn_sub_words,.bn_sub_words_end-bn_sub_words
+.ident "bn_sub_words"
diff --git a/secure/lib/libcrypto/i386/cast-586.s b/secure/lib/libcrypto/i386/cast-586.s
new file mode 100644
index 0000000..b6da53b
--- /dev/null
+++ b/secure/lib/libcrypto/i386/cast-586.s
@@ -0,0 +1,971 @@
+ # $FreeBSD$
 + # Don't even think of reading this code
 + # It was automatically generated by cast-586.pl
 + # which is a perl program used to generate the x86 assembler for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "cast-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl CAST_encrypt
+ .type CAST_encrypt,@function
+CAST_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ # Get short key flag
+ movl 128(%ebp), %eax
+ pushl %eax
+ xorl %eax, %eax
+ # round 0
+ movl (%ebp), %edx
+ movl 4(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 1
+ movl 8(%ebp), %edx
+ movl 12(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 2
+ movl 16(%ebp), %edx
+ movl 20(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 3
+ movl 24(%ebp), %edx
+ movl 28(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 4
+ movl 32(%ebp), %edx
+ movl 36(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 5
+ movl 40(%ebp), %edx
+ movl 44(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 6
+ movl 48(%ebp), %edx
+ movl 52(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 7
+ movl 56(%ebp), %edx
+ movl 60(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 8
+ movl 64(%ebp), %edx
+ movl 68(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 9
+ movl 72(%ebp), %edx
+ movl 76(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 10
+ movl 80(%ebp), %edx
+ movl 84(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 11
+ movl 88(%ebp), %edx
+ movl 92(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ # test short key flag
+ popl %edx
+ orl %edx, %edx
+ jnz .L000cast_enc_done
+ # round 12
+ movl 96(%ebp), %edx
+ movl 100(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 13
+ movl 104(%ebp), %edx
+ movl 108(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 14
+ movl 112(%ebp), %edx
+ movl 116(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 15
+ movl 120(%ebp), %edx
+ movl 124(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+.L000cast_enc_done:
+ nop
+ movl 20(%esp), %eax
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.CAST_encrypt_end:
+ .size CAST_encrypt,.CAST_encrypt_end-CAST_encrypt
+.ident "CAST_encrypt"
+.text
+ .align 16
+.globl CAST_decrypt
+ .type CAST_decrypt,@function
+CAST_decrypt:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ # Get short key flag
+ movl 128(%ebp), %eax
+ orl %eax, %eax
+ jnz .L001cast_dec_skip
+ xorl %eax, %eax
+ # round 15
+ movl 120(%ebp), %edx
+ movl 124(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 14
+ movl 112(%ebp), %edx
+ movl 116(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 13
+ movl 104(%ebp), %edx
+ movl 108(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 12
+ movl 96(%ebp), %edx
+ movl 100(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+.L001cast_dec_skip:
+ # round 11
+ movl 88(%ebp), %edx
+ movl 92(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 10
+ movl 80(%ebp), %edx
+ movl 84(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 9
+ movl 72(%ebp), %edx
+ movl 76(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 8
+ movl 64(%ebp), %edx
+ movl 68(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 7
+ movl 56(%ebp), %edx
+ movl 60(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 6
+ movl 48(%ebp), %edx
+ movl 52(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 5
+ movl 40(%ebp), %edx
+ movl 44(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 4
+ movl 32(%ebp), %edx
+ movl 36(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 3
+ movl 24(%ebp), %edx
+ movl 28(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 2
+ movl 16(%ebp), %edx
+ movl 20(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ # round 1
+ movl 8(%ebp), %edx
+ movl 12(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ # round 0
+ movl (%ebp), %edx
+ movl 4(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ nop
+ movl 20(%esp), %eax
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.CAST_decrypt_end:
+ .size CAST_decrypt,.CAST_decrypt_end-CAST_decrypt
+.ident "CAST_decrypt"
+.text
+ .align 16
+.globl CAST_cbc_encrypt
+ .type CAST_cbc_encrypt,@function
+CAST_cbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ # getting iv ptr from parameter 4
+ movl 36(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ # getting encrypt flag from parameter 5
+ movl 56(%esp), %ecx
+ # get and push parameter 3
+ movl 48(%esp), %eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0, %ecx
+ jz .L002decrypt
+ andl $4294967288, %ebp
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ jz .L003encrypt_finish
+.L004encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L004encrypt_loop
+.L003encrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L005finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L006cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L007ej7:
+ xorl %edx, %edx
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L008ej6:
+ movb 5(%esi), %dh
+.L009ej5:
+ movb 4(%esi), %dl
+.L010ej4:
+ movl (%esi), %ecx
+ jmp .L011ejend
 +.L012ej3:
 + xorl %ecx, %ecx
 + movb 2(%esi), %ch
 + sall $8, %ecx
+.L013ej2:
+ movb 1(%esi), %ch
+.L014ej1:
+ movb (%esi), %cl
+.L011ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L005finish
+.align 16
+.L002decrypt:
+ andl $4294967288, %ebp
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ jz .L015decrypt_finish
+.L016decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L016decrypt_loop
+.L015decrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L005finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 203 # bswapl %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
 + movl 4(%esi), %ebx
 + movl .L025cbc_dec_jmp_table(,%ebp,4),%ebp
 + jmp *%ebp
+.L017dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L018dj6:
+ movb %dh, 5(%edi)
+.L019dj5:
+ movb %dl, 4(%edi)
+.L020dj4:
+ movl %ecx, (%edi)
+ jmp .L021djend
 +.L022dj3:
 + rorl $16, %ecx
 + movb %cl, 2(%edi)
 + shrl $16, %ecx
 +.L023dj2:
 + movb %ch, 1(%edi)
 +.L024dj1:
 + movb %cl, (%edi)
+.L021djend:
+ jmp .L005finish
+.align 16
+.L005finish:
+ movl 60(%esp), %ecx
+ addl $24, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L006cbc_enc_jmp_table:
+ .long 0
+ .long .L014ej1
+ .long .L013ej2
+ .long .L012ej3
+ .long .L010ej4
+ .long .L009ej5
+ .long .L008ej6
+ .long .L007ej7
+.align 16
+.L025cbc_dec_jmp_table:
+ .long 0
+ .long .L024dj1
+ .long .L023dj2
+ .long .L022dj3
+ .long .L020dj4
+ .long .L019dj5
+ .long .L018dj6
+ .long .L017dj7
+.L_CAST_cbc_encrypt_end:
+ .size CAST_cbc_encrypt,.L_CAST_cbc_encrypt_end-CAST_cbc_encrypt
+.ident "desasm.pl"
diff --git a/secure/lib/libcrypto/i386/co-586.s b/secure/lib/libcrypto/i386/co-586.s
new file mode 100644
index 0000000..084f6fe
--- /dev/null
+++ b/secure/lib/libcrypto/i386/co-586.s
@@ -0,0 +1,1270 @@
+ # $FreeBSD$
 + # Don't even think of reading this code
 + # It was automatically generated by co-586.pl
 + # which is a perl program used to generate the x86 assembler for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "co-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl bn_mul_comba8
+ .type bn_mul_comba8,@function
+bn_mul_comba8:
+ pushl %esi
+ movl 12(%esp), %esi
+ pushl %edi
+ movl 20(%esp), %edi
+ pushl %ebp
+ pushl %ebx
+ xorl %ebx, %ebx
+ movl (%esi), %eax
+ xorl %ecx, %ecx
+ movl (%edi), %edx
+ # ################## Calculate word 0
+ xorl %ebp, %ebp
+ # mul a[0]*b[0]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl (%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, (%eax)
+ movl 4(%esi), %eax
+ # saved r[0]
+ # ################## Calculate word 1
+ xorl %ebx, %ebx
+ # mul a[1]*b[0]
+ mull %edx
+ addl %eax, %ecx
+ movl (%esi), %eax
+ adcl %edx, %ebp
+ movl 4(%edi), %edx
+ adcl $0, %ebx
+ # mul a[0]*b[1]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl (%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 4(%eax)
+ movl 8(%esi), %eax
+ # saved r[1]
+ # ################## Calculate word 2
+ xorl %ecx, %ecx
+ # mul a[2]*b[0]
+ mull %edx
+ addl %eax, %ebp
+ movl 4(%esi), %eax
+ adcl %edx, %ebx
+ movl 4(%edi), %edx
+ adcl $0, %ecx
+ # mul a[1]*b[1]
+ mull %edx
+ addl %eax, %ebp
+ movl (%esi), %eax
+ adcl %edx, %ebx
+ movl 8(%edi), %edx
+ adcl $0, %ecx
+ # mul a[0]*b[2]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ movl (%edi), %edx
+ adcl $0, %ecx
+ movl %ebp, 8(%eax)
+ movl 12(%esi), %eax
+ # saved r[2]
+ # ################## Calculate word 3
+ xorl %ebp, %ebp
+ # mul a[3]*b[0]
+ mull %edx
+ addl %eax, %ebx
+ movl 8(%esi), %eax
+ adcl %edx, %ecx
+ movl 4(%edi), %edx
+ adcl $0, %ebp
+ # mul a[2]*b[1]
+ mull %edx
+ addl %eax, %ebx
+ movl 4(%esi), %eax
+ adcl %edx, %ecx
+ movl 8(%edi), %edx
+ adcl $0, %ebp
+ # mul a[1]*b[2]
+ mull %edx
+ addl %eax, %ebx
+ movl (%esi), %eax
+ adcl %edx, %ecx
+ movl 12(%edi), %edx
+ adcl $0, %ebp
+ # mul a[0]*b[3]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl (%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, 12(%eax)
+ movl 16(%esi), %eax
+ # saved r[3]
+ # ################## Calculate word 4
+ xorl %ebx, %ebx
+ # mul a[4]*b[0]
+ mull %edx
+ addl %eax, %ecx
+ movl 12(%esi), %eax
+ adcl %edx, %ebp
+ movl 4(%edi), %edx
+ adcl $0, %ebx
+ # mul a[3]*b[1]
+ mull %edx
+ addl %eax, %ecx
+ movl 8(%esi), %eax
+ adcl %edx, %ebp
+ movl 8(%edi), %edx
+ adcl $0, %ebx
+ # mul a[2]*b[2]
+ mull %edx
+ addl %eax, %ecx
+ movl 4(%esi), %eax
+ adcl %edx, %ebp
+ movl 12(%edi), %edx
+ adcl $0, %ebx
+ # mul a[1]*b[3]
+ mull %edx
+ addl %eax, %ecx
+ movl (%esi), %eax
+ adcl %edx, %ebp
+ movl 16(%edi), %edx
+ adcl $0, %ebx
+ # mul a[0]*b[4]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl (%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 16(%eax)
+ movl 20(%esi), %eax
+ # saved r[4]
+ # ################## Calculate word 5
+ xorl %ecx, %ecx
+ # mul a[5]*b[0]
+ mull %edx
+ addl %eax, %ebp
+ movl 16(%esi), %eax
+ adcl %edx, %ebx
+ movl 4(%edi), %edx
+ adcl $0, %ecx
+ # mul a[4]*b[1]
+ mull %edx
+ addl %eax, %ebp
+ movl 12(%esi), %eax
+ adcl %edx, %ebx
+ movl 8(%edi), %edx
+ adcl $0, %ecx
+ # mul a[3]*b[2]
+ mull %edx
+ addl %eax, %ebp
+ movl 8(%esi), %eax
+ adcl %edx, %ebx
+ movl 12(%edi), %edx
+ adcl $0, %ecx
+ # mul a[2]*b[3]
+ mull %edx
+ addl %eax, %ebp
+ movl 4(%esi), %eax
+ adcl %edx, %ebx
+ movl 16(%edi), %edx
+ adcl $0, %ecx
+ # mul a[1]*b[4]
+ mull %edx
+ addl %eax, %ebp
+ movl (%esi), %eax
+ adcl %edx, %ebx
+ movl 20(%edi), %edx
+ adcl $0, %ecx
+ # mul a[0]*b[5]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ movl (%edi), %edx
+ adcl $0, %ecx
+ movl %ebp, 20(%eax)
+ movl 24(%esi), %eax
+ # saved r[5]
+ # ################## Calculate word 6
+ xorl %ebp, %ebp
+ # mul a[6]*b[0]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esi), %eax
+ adcl %edx, %ecx
+ movl 4(%edi), %edx
+ adcl $0, %ebp
+ # mul a[5]*b[1]
+ mull %edx
+ addl %eax, %ebx
+ movl 16(%esi), %eax
+ adcl %edx, %ecx
+ movl 8(%edi), %edx
+ adcl $0, %ebp
+ # mul a[4]*b[2]
+ mull %edx
+ addl %eax, %ebx
+ movl 12(%esi), %eax
+ adcl %edx, %ecx
+ movl 12(%edi), %edx
+ adcl $0, %ebp
+ # mul a[3]*b[3]
+ mull %edx
+ addl %eax, %ebx
+ movl 8(%esi), %eax
+ adcl %edx, %ecx
+ movl 16(%edi), %edx
+ adcl $0, %ebp
+ # mul a[2]*b[4]
+ mull %edx
+ addl %eax, %ebx
+ movl 4(%esi), %eax
+ adcl %edx, %ecx
+ movl 20(%edi), %edx
+ adcl $0, %ebp
+ # mul a[1]*b[5]
+ mull %edx
+ addl %eax, %ebx
+ movl (%esi), %eax
+ adcl %edx, %ecx
+ movl 24(%edi), %edx
+ adcl $0, %ebp
+ # mul a[0]*b[6]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl (%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, 24(%eax)
+ movl 28(%esi), %eax
+ # saved r[6]
+ # ################## Calculate word 7
+ xorl %ebx, %ebx
+ # mul a[7]*b[0]
+ mull %edx
+ addl %eax, %ecx
+ movl 24(%esi), %eax
+ adcl %edx, %ebp
+ movl 4(%edi), %edx
+ adcl $0, %ebx
+ # mul a[6]*b[1]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esi), %eax
+ adcl %edx, %ebp
+ movl 8(%edi), %edx
+ adcl $0, %ebx
+ # mul a[5]*b[2]
+ mull %edx
+ addl %eax, %ecx
+ movl 16(%esi), %eax
+ adcl %edx, %ebp
+ movl 12(%edi), %edx
+ adcl $0, %ebx
+ # mul a[4]*b[3]
+ mull %edx
+ addl %eax, %ecx
+ movl 12(%esi), %eax
+ adcl %edx, %ebp
+ movl 16(%edi), %edx
+ adcl $0, %ebx
+ # mul a[3]*b[4]
+ mull %edx
+ addl %eax, %ecx
+ movl 8(%esi), %eax
+ adcl %edx, %ebp
+ movl 20(%edi), %edx
+ adcl $0, %ebx
+ # mul a[2]*b[5]
+ mull %edx
+ addl %eax, %ecx
+ movl 4(%esi), %eax
+ adcl %edx, %ebp
+ movl 24(%edi), %edx
+ adcl $0, %ebx
+ # mul a[1]*b[6]
+ mull %edx
+ addl %eax, %ecx
+ movl (%esi), %eax
+ adcl %edx, %ebp
+ movl 28(%edi), %edx
+ adcl $0, %ebx
+ # mul a[0]*b[7]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl 4(%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 28(%eax)
+ movl 28(%esi), %eax
+ # saved r[7]
+ # ################## Calculate word 8
+ xorl %ecx, %ecx
+ # mul a[7]*b[1]
+ mull %edx
+ addl %eax, %ebp
+ movl 24(%esi), %eax
+ adcl %edx, %ebx
+ movl 8(%edi), %edx
+ adcl $0, %ecx
+ # mul a[6]*b[2]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esi), %eax
+ adcl %edx, %ebx
+ movl 12(%edi), %edx
+ adcl $0, %ecx
+ # mul a[5]*b[3]
+ mull %edx
+ addl %eax, %ebp
+ movl 16(%esi), %eax
+ adcl %edx, %ebx
+ movl 16(%edi), %edx
+ adcl $0, %ecx
+ # mul a[4]*b[4]
+ mull %edx
+ addl %eax, %ebp
+ movl 12(%esi), %eax
+ adcl %edx, %ebx
+ movl 20(%edi), %edx
+ adcl $0, %ecx
+ # mul a[3]*b[5]
+ mull %edx
+ addl %eax, %ebp
+ movl 8(%esi), %eax
+ adcl %edx, %ebx
+ movl 24(%edi), %edx
+ adcl $0, %ecx
+ # mul a[2]*b[6]
+ mull %edx
+ addl %eax, %ebp
+ movl 4(%esi), %eax
+ adcl %edx, %ebx
+ movl 28(%edi), %edx
+ adcl $0, %ecx
+ # mul a[1]*b[7]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ movl 8(%edi), %edx
+ adcl $0, %ecx
+ movl %ebp, 32(%eax)
+ movl 28(%esi), %eax
+ # saved r[8]
+ # ################## Calculate word 9
+ xorl %ebp, %ebp
+ # mul a[7]*b[2]
+ mull %edx
+ addl %eax, %ebx
+ movl 24(%esi), %eax
+ adcl %edx, %ecx
+ movl 12(%edi), %edx
+ adcl $0, %ebp
+ # mul a[6]*b[3]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esi), %eax
+ adcl %edx, %ecx
+ movl 16(%edi), %edx
+ adcl $0, %ebp
+ # mul a[5]*b[4]
+ mull %edx
+ addl %eax, %ebx
+ movl 16(%esi), %eax
+ adcl %edx, %ecx
+ movl 20(%edi), %edx
+ adcl $0, %ebp
+ # mul a[4]*b[5]
+ mull %edx
+ addl %eax, %ebx
+ movl 12(%esi), %eax
+ adcl %edx, %ecx
+ movl 24(%edi), %edx
+ adcl $0, %ebp
+ # mul a[3]*b[6]
+ mull %edx
+ addl %eax, %ebx
+ movl 8(%esi), %eax
+ adcl %edx, %ecx
+ movl 28(%edi), %edx
+ adcl $0, %ebp
+ # mul a[2]*b[7]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl 12(%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, 36(%eax)
+ movl 28(%esi), %eax
+ # saved r[9]
+ # ################## Calculate word 10
+ xorl %ebx, %ebx
+ # mul a[7]*b[3]
+ mull %edx
+ addl %eax, %ecx
+ movl 24(%esi), %eax
+ adcl %edx, %ebp
+ movl 16(%edi), %edx
+ adcl $0, %ebx
+ # mul a[6]*b[4]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esi), %eax
+ adcl %edx, %ebp
+ movl 20(%edi), %edx
+ adcl $0, %ebx
+ # mul a[5]*b[5]
+ mull %edx
+ addl %eax, %ecx
+ movl 16(%esi), %eax
+ adcl %edx, %ebp
+ movl 24(%edi), %edx
+ adcl $0, %ebx
+ # mul a[4]*b[6]
+ mull %edx
+ addl %eax, %ecx
+ movl 12(%esi), %eax
+ adcl %edx, %ebp
+ movl 28(%edi), %edx
+ adcl $0, %ebx
+ # mul a[3]*b[7]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl 16(%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 40(%eax)
+ movl 28(%esi), %eax
+ # saved r[10]
+ # ################## Calculate word 11
+ xorl %ecx, %ecx
+ # mul a[7]*b[4]
+ mull %edx
+ addl %eax, %ebp
+ movl 24(%esi), %eax
+ adcl %edx, %ebx
+ movl 20(%edi), %edx
+ adcl $0, %ecx
+ # mul a[6]*b[5]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esi), %eax
+ adcl %edx, %ebx
+ movl 24(%edi), %edx
+ adcl $0, %ecx
+ # mul a[5]*b[6]
+ mull %edx
+ addl %eax, %ebp
+ movl 16(%esi), %eax
+ adcl %edx, %ebx
+ movl 28(%edi), %edx
+ adcl $0, %ecx
+ # mul a[4]*b[7]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ movl 20(%edi), %edx
+ adcl $0, %ecx
+ movl %ebp, 44(%eax)
+ movl 28(%esi), %eax
+ # saved r[11]
+ # ################## Calculate word 12
+ xorl %ebp, %ebp
+ # mul a[7]*b[5]
+ mull %edx
+ addl %eax, %ebx
+ movl 24(%esi), %eax
+ adcl %edx, %ecx
+ movl 24(%edi), %edx
+ adcl $0, %ebp
+ # mul a[6]*b[6]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esi), %eax
+ adcl %edx, %ecx
+ movl 28(%edi), %edx
+ adcl $0, %ebp
+ # mul a[5]*b[7]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl 24(%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, 48(%eax)
+ movl 28(%esi), %eax
+ # saved r[12]
+ # ################## Calculate word 13
+ xorl %ebx, %ebx
+ # mul a[7]*b[6]
+ mull %edx
+ addl %eax, %ecx
+ movl 24(%esi), %eax
+ adcl %edx, %ebp
+ movl 28(%edi), %edx
+ adcl $0, %ebx
+ # mul a[6]*b[7]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl 28(%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 52(%eax)
+ movl 28(%esi), %eax
+ # saved r[13]
+ # ################## Calculate word 14
+ xorl %ecx, %ecx
+ # mul a[7]*b[7]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ adcl $0, %ecx
+ movl %ebp, 56(%eax)
+ # saved r[14]
+ # save r[15]
+ movl %ebx, 60(%eax)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.L_bn_mul_comba8_end:
+ .size bn_mul_comba8,.L_bn_mul_comba8_end-bn_mul_comba8
+.ident "desasm.pl"
+.text
+ .align 16
+.globl bn_mul_comba4
+ .type bn_mul_comba4,@function
+bn_mul_comba4:
+ pushl %esi
+ movl 12(%esp), %esi
+ pushl %edi
+ movl 20(%esp), %edi
+ pushl %ebp
+ pushl %ebx
+ xorl %ebx, %ebx
+ movl (%esi), %eax
+ xorl %ecx, %ecx
+ movl (%edi), %edx
+ # ################## Calculate word 0
+ xorl %ebp, %ebp
+ # mul a[0]*b[0]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl (%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, (%eax)
+ movl 4(%esi), %eax
+ # saved r[0]
+ # ################## Calculate word 1
+ xorl %ebx, %ebx
+ # mul a[1]*b[0]
+ mull %edx
+ addl %eax, %ecx
+ movl (%esi), %eax
+ adcl %edx, %ebp
+ movl 4(%edi), %edx
+ adcl $0, %ebx
+ # mul a[0]*b[1]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl (%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 4(%eax)
+ movl 8(%esi), %eax
+ # saved r[1]
+ # ################## Calculate word 2
+ xorl %ecx, %ecx
+ # mul a[2]*b[0]
+ mull %edx
+ addl %eax, %ebp
+ movl 4(%esi), %eax
+ adcl %edx, %ebx
+ movl 4(%edi), %edx
+ adcl $0, %ecx
+ # mul a[1]*b[1]
+ mull %edx
+ addl %eax, %ebp
+ movl (%esi), %eax
+ adcl %edx, %ebx
+ movl 8(%edi), %edx
+ adcl $0, %ecx
+ # mul a[0]*b[2]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ movl (%edi), %edx
+ adcl $0, %ecx
+ movl %ebp, 8(%eax)
+ movl 12(%esi), %eax
+ # saved r[2]
+ # ################## Calculate word 3
+ xorl %ebp, %ebp
+ # mul a[3]*b[0]
+ mull %edx
+ addl %eax, %ebx
+ movl 8(%esi), %eax
+ adcl %edx, %ecx
+ movl 4(%edi), %edx
+ adcl $0, %ebp
+ # mul a[2]*b[1]
+ mull %edx
+ addl %eax, %ebx
+ movl 4(%esi), %eax
+ adcl %edx, %ecx
+ movl 8(%edi), %edx
+ adcl $0, %ebp
+ # mul a[1]*b[2]
+ mull %edx
+ addl %eax, %ebx
+ movl (%esi), %eax
+ adcl %edx, %ecx
+ movl 12(%edi), %edx
+ adcl $0, %ebp
+ # mul a[0]*b[3]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ movl 4(%edi), %edx
+ adcl $0, %ebp
+ movl %ebx, 12(%eax)
+ movl 12(%esi), %eax
+ # saved r[3]
+ # ################## Calculate word 4
+ xorl %ebx, %ebx
+ # mul a[3]*b[1]
+ mull %edx
+ addl %eax, %ecx
+ movl 8(%esi), %eax
+ adcl %edx, %ebp
+ movl 8(%edi), %edx
+ adcl $0, %ebx
+ # mul a[2]*b[2]
+ mull %edx
+ addl %eax, %ecx
+ movl 4(%esi), %eax
+ adcl %edx, %ebp
+ movl 12(%edi), %edx
+ adcl $0, %ebx
+ # mul a[1]*b[3]
+ mull %edx
+ addl %eax, %ecx
+ movl 20(%esp), %eax
+ adcl %edx, %ebp
+ movl 8(%edi), %edx
+ adcl $0, %ebx
+ movl %ecx, 16(%eax)
+ movl 12(%esi), %eax
+ # saved r[4]
+ # ################## Calculate word 5
+ xorl %ecx, %ecx
+ # mul a[3]*b[2]
+ mull %edx
+ addl %eax, %ebp
+ movl 8(%esi), %eax
+ adcl %edx, %ebx
+ movl 12(%edi), %edx
+ adcl $0, %ecx
+ # mul a[2]*b[3]
+ mull %edx
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ adcl %edx, %ebx
+ movl 12(%edi), %edx
+ adcl $0, %ecx
+ movl %ebp, 20(%eax)
+ movl 12(%esi), %eax
+ # saved r[5]
+ # ################## Calculate word 6
+ xorl %ebp, %ebp
+ # mul a[3]*b[3]
+ mull %edx
+ addl %eax, %ebx
+ movl 20(%esp), %eax
+ adcl %edx, %ecx
+ adcl $0, %ebp
+ movl %ebx, 24(%eax)
+ # saved r[6]
+ # save r[7]
+ movl %ecx, 28(%eax)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.L_bn_mul_comba4_end:
+ .size bn_mul_comba4,.L_bn_mul_comba4_end-bn_mul_comba4
+.ident "desasm.pl"
+.text
+ .align 16
+.globl bn_sqr_comba8
+ .type bn_sqr_comba8,@function
+bn_sqr_comba8:
+ pushl %esi
+ pushl %edi
+ pushl %ebp
+ pushl %ebx
+ movl 20(%esp), %edi
+ movl 24(%esp), %esi
+ xorl %ebx, %ebx
+ xorl %ecx, %ecx
+ movl (%esi), %eax
+ # ############### Calculate word 0
+ xorl %ebp, %ebp
+ # sqr a[0]*a[0]
+ mull %eax
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl (%esi), %edx
+ adcl $0, %ebp
+ movl %ebx, (%edi)
+ movl 4(%esi), %eax
+ # saved r[0]
+ # ############### Calculate word 1
+ xorl %ebx, %ebx
+ # sqr a[1]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 8(%esi), %eax
+ adcl $0, %ebx
+ movl %ecx, 4(%edi)
+ movl (%esi), %edx
+ # saved r[1]
+ # ############### Calculate word 2
+ xorl %ecx, %ecx
+ # sqr a[2]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 4(%esi), %eax
+ adcl $0, %ecx
+ # sqr a[1]*a[1]
+ mull %eax
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl (%esi), %edx
+ adcl $0, %ecx
+ movl %ebp, 8(%edi)
+ movl 12(%esi), %eax
+ # saved r[2]
+ # ############### Calculate word 3
+ xorl %ebp, %ebp
+ # sqr a[3]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 8(%esi), %eax
+ adcl $0, %ebp
+ movl 4(%esi), %edx
+ # sqr a[2]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 16(%esi), %eax
+ adcl $0, %ebp
+ movl %ebx, 12(%edi)
+ movl (%esi), %edx
+ # saved r[3]
+ # ############### Calculate word 4
+ xorl %ebx, %ebx
+ # sqr a[4]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 12(%esi), %eax
+ adcl $0, %ebx
+ movl 4(%esi), %edx
+ # sqr a[3]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 8(%esi), %eax
+ adcl $0, %ebx
+ # sqr a[2]*a[2]
+ mull %eax
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl (%esi), %edx
+ adcl $0, %ebx
+ movl %ecx, 16(%edi)
+ movl 20(%esi), %eax
+ # saved r[4]
+ # ############### Calculate word 5
+ xorl %ecx, %ecx
+ # sqr a[5]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 16(%esi), %eax
+ adcl $0, %ecx
+ movl 4(%esi), %edx
+ # sqr a[4]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 12(%esi), %eax
+ adcl $0, %ecx
+ movl 8(%esi), %edx
+ # sqr a[3]*a[2]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 24(%esi), %eax
+ adcl $0, %ecx
+ movl %ebp, 20(%edi)
+ movl (%esi), %edx
+ # saved r[5]
+ # ############### Calculate word 6
+ xorl %ebp, %ebp
+ # sqr a[6]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 20(%esi), %eax
+ adcl $0, %ebp
+ movl 4(%esi), %edx
+ # sqr a[5]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 16(%esi), %eax
+ adcl $0, %ebp
+ movl 8(%esi), %edx
+ # sqr a[4]*a[2]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 12(%esi), %eax
+ adcl $0, %ebp
+ # sqr a[3]*a[3]
+ mull %eax
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl (%esi), %edx
+ adcl $0, %ebp
+ movl %ebx, 24(%edi)
+ movl 28(%esi), %eax
+ # saved r[6]
+ # ############### Calculate word 7
+ xorl %ebx, %ebx
+ # sqr a[7]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 24(%esi), %eax
+ adcl $0, %ebx
+ movl 4(%esi), %edx
+ # sqr a[6]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 20(%esi), %eax
+ adcl $0, %ebx
+ movl 8(%esi), %edx
+ # sqr a[5]*a[2]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 16(%esi), %eax
+ adcl $0, %ebx
+ movl 12(%esi), %edx
+ # sqr a[4]*a[3]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 28(%esi), %eax
+ adcl $0, %ebx
+ movl %ecx, 28(%edi)
+ movl 4(%esi), %edx
+ # saved r[7]
+ # ############### Calculate word 8
+ xorl %ecx, %ecx
+ # sqr a[7]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 24(%esi), %eax
+ adcl $0, %ecx
+ movl 8(%esi), %edx
+ # sqr a[6]*a[2]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 20(%esi), %eax
+ adcl $0, %ecx
+ movl 12(%esi), %edx
+ # sqr a[5]*a[3]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 16(%esi), %eax
+ adcl $0, %ecx
+ # sqr a[4]*a[4]
+ mull %eax
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 8(%esi), %edx
+ adcl $0, %ecx
+ movl %ebp, 32(%edi)
+ movl 28(%esi), %eax
+ # saved r[8]
+ # ############### Calculate word 9
+ xorl %ebp, %ebp
+ # sqr a[7]*a[2]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 24(%esi), %eax
+ adcl $0, %ebp
+ movl 12(%esi), %edx
+ # sqr a[6]*a[3]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 20(%esi), %eax
+ adcl $0, %ebp
+ movl 16(%esi), %edx
+ # sqr a[5]*a[4]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 28(%esi), %eax
+ adcl $0, %ebp
+ movl %ebx, 36(%edi)
+ movl 12(%esi), %edx
+ # saved r[9]
+ # ############### Calculate word 10
+ xorl %ebx, %ebx
+ # sqr a[7]*a[3]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 24(%esi), %eax
+ adcl $0, %ebx
+ movl 16(%esi), %edx
+ # sqr a[6]*a[4]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 20(%esi), %eax
+ adcl $0, %ebx
+ # sqr a[5]*a[5]
+ mull %eax
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 16(%esi), %edx
+ adcl $0, %ebx
+ movl %ecx, 40(%edi)
+ movl 28(%esi), %eax
+ # saved r[10]
+ # ############### Calculate word 11
+ xorl %ecx, %ecx
+ # sqr a[7]*a[4]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 24(%esi), %eax
+ adcl $0, %ecx
+ movl 20(%esi), %edx
+ # sqr a[6]*a[5]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 28(%esi), %eax
+ adcl $0, %ecx
+ movl %ebp, 44(%edi)
+ movl 20(%esi), %edx
+ # saved r[11]
+ # ############### Calculate word 12
+ xorl %ebp, %ebp
+ # sqr a[7]*a[5]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 24(%esi), %eax
+ adcl $0, %ebp
+ # sqr a[6]*a[6]
+ mull %eax
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 24(%esi), %edx
+ adcl $0, %ebp
+ movl %ebx, 48(%edi)
+ movl 28(%esi), %eax
+ # saved r[12]
+ # ############### Calculate word 13
+ xorl %ebx, %ebx
+ # sqr a[7]*a[6]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 28(%esi), %eax
+ adcl $0, %ebx
+ movl %ecx, 52(%edi)
+ # saved r[13]
+ # ############### Calculate word 14
+ xorl %ecx, %ecx
+ # sqr a[7]*a[7]
+ mull %eax
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ adcl $0, %ecx
+ movl %ebp, 56(%edi)
+ # saved r[14]
+ movl %ebx, 60(%edi)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.L_bn_sqr_comba8_end:
+ .size bn_sqr_comba8,.L_bn_sqr_comba8_end-bn_sqr_comba8
+.ident "desasm.pl"
+.text
+ .align 16
+.globl bn_sqr_comba4
+ .type bn_sqr_comba4,@function
+bn_sqr_comba4:
+ pushl %esi
+ pushl %edi
+ pushl %ebp
+ pushl %ebx
+ movl 20(%esp), %edi
+ movl 24(%esp), %esi
+ xorl %ebx, %ebx
+ xorl %ecx, %ecx
+ movl (%esi), %eax
+ # ############### Calculate word 0
+ xorl %ebp, %ebp
+ # sqr a[0]*a[0]
+ mull %eax
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl (%esi), %edx
+ adcl $0, %ebp
+ movl %ebx, (%edi)
+ movl 4(%esi), %eax
+ # saved r[0]
+ # ############### Calculate word 1
+ xorl %ebx, %ebx
+ # sqr a[1]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 8(%esi), %eax
+ adcl $0, %ebx
+ movl %ecx, 4(%edi)
+ movl (%esi), %edx
+ # saved r[1]
+ # ############### Calculate word 2
+ xorl %ecx, %ecx
+ # sqr a[2]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 4(%esi), %eax
+ adcl $0, %ecx
+ # sqr a[1]*a[1]
+ mull %eax
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl (%esi), %edx
+ adcl $0, %ecx
+ movl %ebp, 8(%edi)
+ movl 12(%esi), %eax
+ # saved r[2]
+ # ############### Calculate word 3
+ xorl %ebp, %ebp
+ # sqr a[3]*a[0]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 8(%esi), %eax
+ adcl $0, %ebp
+ movl 4(%esi), %edx
+ # sqr a[2]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebp
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ movl 12(%esi), %eax
+ adcl $0, %ebp
+ movl %ebx, 12(%edi)
+ movl 4(%esi), %edx
+ # saved r[3]
+ # ############### Calculate word 4
+ xorl %ebx, %ebx
+ # sqr a[3]*a[1]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ebx
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 8(%esi), %eax
+ adcl $0, %ebx
+ # sqr a[2]*a[2]
+ mull %eax
+ addl %eax, %ecx
+ adcl %edx, %ebp
+ movl 8(%esi), %edx
+ adcl $0, %ebx
+ movl %ecx, 16(%edi)
+ movl 12(%esi), %eax
+ # saved r[4]
+ # ############### Calculate word 5
+ xorl %ecx, %ecx
+ # sqr a[3]*a[2]
+ mull %edx
+ addl %eax, %eax
+ adcl %edx, %edx
+ adcl $0, %ecx
+ addl %eax, %ebp
+ adcl %edx, %ebx
+ movl 12(%esi), %eax
+ adcl $0, %ecx
+ movl %ebp, 20(%edi)
+ # saved r[5]
+ # ############### Calculate word 6
+ xorl %ebp, %ebp
+ # sqr a[3]*a[3]
+ mull %eax
+ addl %eax, %ebx
+ adcl %edx, %ecx
+ adcl $0, %ebp
+ movl %ebx, 24(%edi)
+ # saved r[6]
+ movl %ecx, 28(%edi)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.L_bn_sqr_comba4_end:
+ .size bn_sqr_comba4,.L_bn_sqr_comba4_end-bn_sqr_comba4
+.ident "desasm.pl"
diff --git a/secure/lib/libcrypto/i386/crypt586.s b/secure/lib/libcrypto/i386/crypt586.s
new file mode 100644
index 0000000..e80834e
--- /dev/null
+++ b/secure/lib/libcrypto/i386/crypt586.s
@@ -0,0 +1,933 @@
+ # $FreeBSD$
 + # Don't even think of reading this code
 + # It was automatically generated by crypt586.pl
 + # which is a perl program used to generate the x86 assembler for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "crypt586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl fcrypt_body
+ .type fcrypt_body,@function
+fcrypt_body:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ # Load the 2 words
+ xorl %edi, %edi
+ xorl %esi, %esi
+ movl 24(%esp), %ebp
+ pushl $25
+.L000start:
+
+ # Round 0
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl (%ebp), %ebx
+ xorl %ebx, %eax
+ movl 4(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 1
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 8(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 12(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 2
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 16(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 20(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 3
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 24(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 28(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 4
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 32(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 36(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 5
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 40(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 44(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 6
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 48(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 52(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 7
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 56(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 60(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 8
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 64(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 68(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 9
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 72(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 76(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 10
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 80(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 84(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 11
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 88(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 92(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 12
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 96(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 100(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 13
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 104(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 108(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 14
+ movl 32(%esp), %eax
+ movl %esi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %esi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 112(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 116(%ebp), %ecx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 15
+ movl 32(%esp), %eax
+ movl %edi, %edx
+ shrl $16, %edx
+ movl 36(%esp), %ecx
+ xorl %edi, %edx
+ andl %edx, %eax
+ andl %ecx, %edx
+ movl %eax, %ebx
+ sall $16, %ebx
+ movl %edx, %ecx
+ sall $16, %ecx
+ xorl %ebx, %eax
+ xorl %ecx, %edx
+ movl 120(%ebp), %ebx
+ xorl %ebx, %eax
+ movl 124(%ebp), %ecx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ xorl %ecx, %edx
+ andl $0xfcfcfcfc, %eax
+ xorl %ebx, %ebx
+ andl $0xcfcfcfcf, %edx
+ xorl %ecx, %ecx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
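+
+	# one 16-round pass done: swap the two halves, decrement the
+	# iteration count kept at (%esp) and loop back while it is nonzero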
+ movl (%esp), %ebx
+ movl %edi, %eax
+ decl %ebx
+ movl %esi, %edi
+ movl %eax, %esi
+ movl %ebx, (%esp)
+ jnz .L000start
+
+ # FP
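+	# (final permutation, the inverse of IP, done with the same
+	# rotate/mask/xor swap sequence; the result is stored through the
+	# output pointer reloaded from 24(%esp))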
+ movl 24(%esp), %edx
+.byte 209
+.byte 207	# rorl $1, %edi
+ movl %esi, %eax
+ xorl %edi, %esi
+ andl $0xaaaaaaaa, %esi
+ xorl %esi, %eax
+ xorl %esi, %edi
+
+ roll $23, %eax
+ movl %eax, %esi
+ xorl %edi, %eax
+ andl $0x03fc03fc, %eax
+ xorl %eax, %esi
+ xorl %eax, %edi
+
+ roll $10, %esi
+ movl %esi, %eax
+ xorl %edi, %esi
+ andl $0x33333333, %esi
+ xorl %esi, %eax
+ xorl %esi, %edi
+
+ roll $18, %edi
+ movl %edi, %esi
+ xorl %eax, %edi
+ andl $0xfff0000f, %edi
+ xorl %edi, %esi
+ xorl %edi, %eax
+
+ roll $12, %esi
+ movl %esi, %edi
+ xorl %eax, %esi
+ andl $0xf0f0f0f0, %esi
+ xorl %esi, %edi
+ xorl %esi, %eax
+
+ rorl $4, %eax
+ movl %eax, (%edx)
+ movl %edi, 4(%edx)
+ popl %ecx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.fcrypt_body_end:
+ .size fcrypt_body,.fcrypt_body_end-fcrypt_body
+.ident "fcrypt_body"
diff --git a/secure/lib/libcrypto/i386/des-586.s b/secure/lib/libcrypto/i386/des-586.s
new file mode 100644
index 0000000..55ddd71
--- /dev/null
+++ b/secure/lib/libcrypto/i386/des-586.s
@@ -0,0 +1,3154 @@
+ # $FreeBSD$
+	# Don't even think of reading this code
+	# It was automatically generated by des-586.pl
+	# which is a perl program used to generate the x86 assembler for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "des-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl des_encrypt1
+ .type des_encrypt1,@function
+des_encrypt1:
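+	# encrypt or decrypt one 64-bit block in place; the arguments are the
+	# data pointer, the key schedule and an encrypt flag (the
+	# des_encrypt1() interface from OpenSSL's libdes)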
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
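+	# (after the four register pushes below the arguments sit at
+	# 20(%esp)=data, 24(%esp)=key schedule, 28(%esp)=encrypt flag)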
+ movl 12(%esp), %esi
+ xorl %ecx, %ecx
+ pushl %ebx
+ pushl %ebp
+ movl (%esi), %eax
+ movl 28(%esp), %ebx
+ movl 4(%esi), %edi
+
+ # IP
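+	# (initial permutation done as five rotate/mask/xor bit swaps,
+	# no lookup table needed)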
+ roll $4, %eax
+ movl %eax, %esi
+ xorl %edi, %eax
+ andl $0xf0f0f0f0, %eax
+ xorl %eax, %esi
+ xorl %eax, %edi
+
+ roll $20, %edi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0xfff0000f, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $14, %eax
+ movl %eax, %edi
+ xorl %esi, %eax
+ andl $0x33333333, %eax
+ xorl %eax, %edi
+ xorl %eax, %esi
+
+ roll $22, %esi
+ movl %esi, %eax
+ xorl %edi, %esi
+ andl $0x03fc03fc, %esi
+ xorl %esi, %eax
+ xorl %esi, %edi
+
+ roll $9, %eax
+ movl %eax, %esi
+ xorl %edi, %eax
+ andl $0xaaaaaaaa, %eax
+ xorl %eax, %esi
+ xorl %eax, %edi
+
+.byte 209
+.byte 199	# roll $1, %edi
+ movl 24(%esp), %ebp
+ cmpl $0, %ebx
+ je .L000start_decrypt
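+	# encrypting: walk the 16 subkeys in forward order; the
+	# .L000start_decrypt path below runs the identical rounds with the
+	# subkeys in reverse order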
+
+ # Round 0
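+	# (round body: XOR two subkey words into the active half, mask out
+	# the eight 6-bit S-box indices, then XOR the des_SPtrans entries
+	# into the other half; %ebp doubles as a lookup scratch register and
+	# is reloaded from 24(%esp) near the end of each round)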
+ movl (%ebp), %eax
+ xorl %ebx, %ebx
+ movl 4(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 1
+ movl 8(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 12(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 2
+ movl 16(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 20(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 3
+ movl 24(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 28(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 4
+ movl 32(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 36(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 5
+ movl 40(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 44(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 6
+ movl 48(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 52(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 7
+ movl 56(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 60(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 8
+ movl 64(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 68(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 9
+ movl 72(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 76(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 10
+ movl 80(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 84(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 11
+ movl 88(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 92(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 12
+ movl 96(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 100(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 13
+ movl 104(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 108(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 14
+ movl 112(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 116(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 15
+ movl 120(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 124(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+ jmp .L001end
+.L000start_decrypt:
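+	# decrypting: same round function, subkeys taken in reverse order
+	# (round 15 down to round 0)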
+
+ # Round 15
+ movl 120(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 124(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 14
+ movl 112(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 116(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 13
+ movl 104(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 108(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 12
+ movl 96(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 100(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 11
+ movl 88(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 92(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 10
+ movl 80(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 84(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 9
+ movl 72(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 76(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 8
+ movl 64(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 68(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 7
+ movl 56(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 60(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 6
+ movl 48(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 52(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 5
+ movl 40(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 44(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 4
+ movl 32(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 36(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 3
+ movl 24(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 28(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 2
+ movl 16(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 20(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 1
+ movl 8(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 12(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 0
+ movl (%ebp), %eax
+ xorl %ebx, %ebx
+ movl 4(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+.L001end:
+
+ # FP
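+	# (inverse initial permutation, then the block is written back
+	# through the data pointer at 20(%esp))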
+ movl 20(%esp), %edx
+.byte 209
+.byte 206	# rorl $1, %esi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0xaaaaaaaa, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $23, %eax
+ movl %eax, %edi
+ xorl %esi, %eax
+ andl $0x03fc03fc, %eax
+ xorl %eax, %edi
+ xorl %eax, %esi
+
+ roll $10, %edi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0x33333333, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $18, %esi
+ movl %esi, %edi
+ xorl %eax, %esi
+ andl $0xfff0000f, %esi
+ xorl %esi, %edi
+ xorl %esi, %eax
+
+ roll $12, %edi
+ movl %edi, %esi
+ xorl %eax, %edi
+ andl $0xf0f0f0f0, %edi
+ xorl %edi, %esi
+ xorl %edi, %eax
+
+ rorl $4, %eax
+ movl %eax, (%edx)
+ movl %esi, 4(%edx)
+ popl %ebp
+ popl %ebx
+ popl %edi
+ popl %esi
+ ret
+.L_des_encrypt1_end:
+ .size des_encrypt1,.L_des_encrypt1_end-des_encrypt1
+.ident "desasm.pl"
+.text
+ .align 16
+.globl des_encrypt2
+ .type des_encrypt2,@function
+des_encrypt2:
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
+ movl 12(%esp), %eax
+ xorl %ecx, %ecx
+ pushl %ebx
+ pushl %ebp
+ movl (%eax), %esi
+ movl 28(%esp), %ebx
+ roll $3, %esi
+ movl 4(%eax), %edi
+ roll $3, %edi
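+	# no IP/FP in des_encrypt2: it is used for the inner passes of the
+	# 3DES routines, so the halves stay rotated left by 3 bits between
+	# calls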
+ movl 24(%esp), %ebp
+ cmpl $0, %ebx
+ je .L002start_decrypt
+
+ # Round 0
+ movl (%ebp), %eax
+ xorl %ebx, %ebx
+ movl 4(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 1
+ movl 8(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 12(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 2
+ movl 16(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 20(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 3
+ movl 24(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 28(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 4
+ movl 32(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 36(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 5
+ movl 40(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 44(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 6
+ movl 48(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 52(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 7
+ movl 56(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 60(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 8
+ movl 64(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 68(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 9
+ movl 72(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 76(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 10
+ movl 80(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 84(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 11
+ movl 88(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 92(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 12
+ movl 96(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 100(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 13
+ movl 104(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 108(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 14
+ movl 112(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 116(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 15
+ movl 120(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 124(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+ jmp .L003end
+.L002start_decrypt:
+
+ # Round 15
+ movl 120(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 124(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 14
+ movl 112(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 116(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 13
+ movl 104(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 108(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 12
+ movl 96(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 100(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 11
+ movl 88(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 92(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 10
+ movl 80(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 84(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 9
+ movl 72(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 76(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 8
+ movl 64(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 68(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 7
+ movl 56(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 60(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 6
+ movl 48(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 52(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 5
+ movl 40(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 44(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 4
+ movl 32(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 36(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 3
+ movl 24(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 28(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 2
+ movl 16(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 20(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+
+ # Round 1
+ movl 8(%ebp), %eax
+ xorl %ebx, %ebx
+ movl 12(%ebp), %edx
+ xorl %esi, %eax
+ xorl %esi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %edi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %edi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %edi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %edi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %edi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %edi
+
+ # Round 0
+ movl (%ebp), %eax
+ xorl %ebx, %ebx
+ movl 4(%ebp), %edx
+ xorl %edi, %eax
+ xorl %edi, %edx
+ andl $0xfcfcfcfc, %eax
+ andl $0xcfcfcfcf, %edx
+ movb %al, %bl
+ movb %ah, %cl
+ rorl $4, %edx
+ movl des_SPtrans(%ebx),%ebp
+ movb %dl, %bl
+ xorl %ebp, %esi
+ movl 0x200+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movb %dh, %cl
+ shrl $16, %eax
+ movl 0x100+des_SPtrans(%ebx),%ebp
+ xorl %ebp, %esi
+ movb %ah, %bl
+ shrl $16, %edx
+ movl 0x300+des_SPtrans(%ecx),%ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ movb %dh, %cl
+ andl $0xff, %eax
+ andl $0xff, %edx
+ movl 0x600+des_SPtrans(%ebx),%ebx
+ xorl %ebx, %esi
+ movl 0x700+des_SPtrans(%ecx),%ebx
+ xorl %ebx, %esi
+ movl 0x400+des_SPtrans(%eax),%ebx
+ xorl %ebx, %esi
+ movl 0x500+des_SPtrans(%edx),%ebx
+ xorl %ebx, %esi
+.L003end:
+
+ # Fixup
+ rorl $3, %edi
+ movl 20(%esp), %eax
+ rorl $3, %esi
+ movl %edi, (%eax)
+ movl %esi, 4(%eax)
+ popl %ebp
+ popl %ebx
+ popl %edi
+ popl %esi
+ ret
+.L_des_encrypt2_end:
+ .size des_encrypt2,.L_des_encrypt2_end-des_encrypt2
+.ident "desasm.pl"
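
Each unrolled round above (Round 9 down to Round 0 in this hunk) is one table-driven DES f-function: the 0xfcfcfcfc/0xcfcfcfcf masks and the rorl $4 split the key-mixed right half into eight 6-bit indexes, and eight des_SPtrans lookups are XORed into the other half. As a reading aid only, here is a hedged C sketch of one such round, following the libdes C reference code; des_SPtrans is assumed to be the standard 8x64 table of 32-bit words and ks the 32-word subkey schedule:

    #include <stdint.h>

    extern const uint32_t des_SPtrans[8][64];  /* standard S-box/permutation tables */

    /* One DES round: returns the f-function output to XOR into the left half. */
    static uint32_t des_round(uint32_t R, const uint32_t *ks, int i)
    {
        uint32_t u = R ^ ks[2 * i];      /* feeds tables 0,2,4,6 (the 0xfcfcfcfc path) */
        uint32_t t = R ^ ks[2 * i + 1];  /* feeds tables 1,3,5,7 (the 0xcfcfcfcf path) */
        t = (t >> 4) | (t << 28);        /* the rorl $4 in the assembly */
        return des_SPtrans[0][(u >>  2) & 0x3f] ^
               des_SPtrans[2][(u >> 10) & 0x3f] ^
               des_SPtrans[4][(u >> 18) & 0x3f] ^
               des_SPtrans[6][(u >> 26) & 0x3f] ^
               des_SPtrans[1][(t >>  2) & 0x3f] ^
               des_SPtrans[3][(t >> 10) & 0x3f] ^
               des_SPtrans[5][(t >> 18) & 0x3f] ^
               des_SPtrans[7][(t >> 26) & 0x3f];
    }

The round index i walks the 16 subkey pairs; des_encrypt2 walks them forward or backward depending on its encrypt flag, which is why the unrolled copies differ only in their 8*i(%ebp) subkey offsets.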
+.text
+ .align 16
+.globl des_encrypt3
+ .type des_encrypt3,@function
+des_encrypt3:
+ pushl %ebx
+ movl 8(%esp), %ebx
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+
+ # Load the data words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ subl $12, %esp
+
+ # IP
+ roll $4, %edi
+ movl %edi, %edx
+ xorl %esi, %edi
+ andl $0xf0f0f0f0, %edi
+ xorl %edi, %edx
+ xorl %edi, %esi
+
+ roll $20, %esi
+ movl %esi, %edi
+ xorl %edx, %esi
+ andl $0xfff0000f, %esi
+ xorl %esi, %edi
+ xorl %esi, %edx
+
+ roll $14, %edi
+ movl %edi, %esi
+ xorl %edx, %edi
+ andl $0x33333333, %edi
+ xorl %edi, %esi
+ xorl %edi, %edx
+
+ roll $22, %edx
+ movl %edx, %edi
+ xorl %esi, %edx
+ andl $0x03fc03fc, %edx
+ xorl %edx, %edi
+ xorl %edx, %esi
+
+ roll $9, %edi
+ movl %edi, %edx
+ xorl %esi, %edi
+ andl $0xaaaaaaaa, %edi
+ xorl %edi, %edx
+ xorl %edi, %esi
+
+ rorl $3, %edx
+ rorl $2, %esi
+ movl %esi, 4(%ebx)
+ movl 36(%esp), %eax
+ movl %edx, (%ebx)
+ movl 40(%esp), %edi
+ movl 44(%esp), %esi
+ movl $1, 8(%esp)
+ movl %eax, 4(%esp)
+ movl %ebx, (%esp)
+ call des_encrypt2
+ movl $0, 8(%esp)
+ movl %edi, 4(%esp)
+ movl %ebx, (%esp)
+ call des_encrypt2
+ movl $1, 8(%esp)
+ movl %esi, 4(%esp)
+ movl %ebx, (%esp)
+ call des_encrypt2
+ addl $12, %esp
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+
+ # FP
+ roll $2, %esi
+ roll $3, %edi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0xaaaaaaaa, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $23, %eax
+ movl %eax, %edi
+ xorl %esi, %eax
+ andl $0x03fc03fc, %eax
+ xorl %eax, %edi
+ xorl %eax, %esi
+
+ roll $10, %edi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0x33333333, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $18, %esi
+ movl %esi, %edi
+ xorl %eax, %esi
+ andl $0xfff0000f, %esi
+ xorl %esi, %edi
+ xorl %esi, %eax
+
+ roll $12, %edi
+ movl %edi, %esi
+ xorl %eax, %edi
+ andl $0xf0f0f0f0, %edi
+ xorl %edi, %esi
+ xorl %edi, %eax
+
+ rorl $4, %eax
+ movl %eax, (%ebx)
+ movl %esi, 4(%ebx)
+ popl %edi
+ popl %esi
+ popl %ebp
+ popl %ebx
+ ret
+.L_des_encrypt3_end:
+ .size des_encrypt3,.L_des_encrypt3_end-des_encrypt3
+.ident "desasm.pl"
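
des_encrypt3 is the EDE composition: it performs the initial permutation once, calls des_encrypt2 (which omits IP/FP) three times with the encrypt flag set to 1, 0, 1 over the three key schedules pulled from 36/40/44(%esp), then performs the final permutation once. A hedged C outline, with des_ip/des_fp as hypothetical stand-ins for the inlined bit permutations and the des_encrypt2 prototype approximated from the call sequence:

    #include <stdint.h>

    void des_encrypt2(uint32_t data[2], const uint32_t *ks, int enc); /* assumed shape */
    void des_ip(uint32_t data[2]);  /* hypothetical: the inlined IP above */
    void des_fp(uint32_t data[2]);  /* hypothetical: the inlined FP above */

    void des_encrypt3_sketch(uint32_t data[2], const uint32_t *ks1,
                             const uint32_t *ks2, const uint32_t *ks3)
    {
        des_ip(data);
        des_encrypt2(data, ks1, 1);  /* encrypt under key 1 */
        des_encrypt2(data, ks2, 0);  /* decrypt under key 2 */
        des_encrypt2(data, ks3, 1);  /* encrypt under key 3 */
        des_fp(data);
    }

Doing IP/FP once around all three passes, rather than inside each one, is what makes the shared des_encrypt2 core worthwhile here.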
+.text
+ .align 16
+.globl des_decrypt3
+ .type des_decrypt3,@function
+des_decrypt3:
+ pushl %ebx
+ movl 8(%esp), %ebx
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+
+ # Load the data words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ subl $12, %esp
+
+ # IP
+ roll $4, %edi
+ movl %edi, %edx
+ xorl %esi, %edi
+ andl $0xf0f0f0f0, %edi
+ xorl %edi, %edx
+ xorl %edi, %esi
+
+ roll $20, %esi
+ movl %esi, %edi
+ xorl %edx, %esi
+ andl $0xfff0000f, %esi
+ xorl %esi, %edi
+ xorl %esi, %edx
+
+ roll $14, %edi
+ movl %edi, %esi
+ xorl %edx, %edi
+ andl $0x33333333, %edi
+ xorl %edi, %esi
+ xorl %edi, %edx
+
+ roll $22, %edx
+ movl %edx, %edi
+ xorl %esi, %edx
+ andl $0x03fc03fc, %edx
+ xorl %edx, %edi
+ xorl %edx, %esi
+
+ roll $9, %edi
+ movl %edi, %edx
+ xorl %esi, %edi
+ andl $0xaaaaaaaa, %edi
+ xorl %edi, %edx
+ xorl %edi, %esi
+
+ rorl $3, %edx
+ rorl $2, %esi
+ movl %esi, 4(%ebx)
+ movl 36(%esp), %esi
+ movl %edx, (%ebx)
+ movl 40(%esp), %edi
+ movl 44(%esp), %eax
+ movl $0, 8(%esp)
+ movl %eax, 4(%esp)
+ movl %ebx, (%esp)
+ call des_encrypt2
+ movl $1, 8(%esp)
+ movl %edi, 4(%esp)
+ movl %ebx, (%esp)
+ call des_encrypt2
+ movl $0, 8(%esp)
+ movl %esi, 4(%esp)
+ movl %ebx, (%esp)
+ call des_encrypt2
+ addl $12, %esp
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+
+ # FP
+ roll $2, %esi
+ roll $3, %edi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0xaaaaaaaa, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $23, %eax
+ movl %eax, %edi
+ xorl %esi, %eax
+ andl $0x03fc03fc, %eax
+ xorl %eax, %edi
+ xorl %eax, %esi
+
+ roll $10, %edi
+ movl %edi, %eax
+ xorl %esi, %edi
+ andl $0x33333333, %edi
+ xorl %edi, %eax
+ xorl %edi, %esi
+
+ roll $18, %esi
+ movl %esi, %edi
+ xorl %eax, %esi
+ andl $0xfff0000f, %esi
+ xorl %esi, %edi
+ xorl %esi, %eax
+
+ roll $12, %edi
+ movl %edi, %esi
+ xorl %eax, %edi
+ andl $0xf0f0f0f0, %edi
+ xorl %edi, %esi
+ xorl %edi, %eax
+
+ rorl $4, %eax
+ movl %eax, (%ebx)
+ movl %esi, 4(%ebx)
+ popl %edi
+ popl %esi
+ popl %ebp
+ popl %ebx
+ ret
+.L_des_decrypt3_end:
+ .size des_decrypt3,.L_des_decrypt3_end-des_decrypt3
+.ident "desasm.pl"
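
des_decrypt3 undoes the above by reversing both the key order and the per-pass direction; reusing the declarations from the previous sketch:

    void des_decrypt3_sketch(uint32_t data[2], const uint32_t *ks1,
                             const uint32_t *ks2, const uint32_t *ks3)
    {
        des_ip(data);
        des_encrypt2(data, ks3, 0);  /* decrypt under key 3 */
        des_encrypt2(data, ks2, 1);  /* encrypt under key 2 */
        des_encrypt2(data, ks1, 0);  /* decrypt under key 1 */
        des_fp(data);
    }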
+.text
+ .align 16
+.globl des_ncbc_encrypt
+ .type des_ncbc_encrypt,@function
+des_ncbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ # getting iv ptr from parameter 4
+ movl 36(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ # getting encrypt flag from parameter 5
+ movl 56(%esp), %ecx
+ # get and push parameter 5
+ pushl %ecx
+ # get and push parameter 3
+ movl 52(%esp), %eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0, %ecx
+ jz .L004decrypt
+ andl $4294967288, %ebp
+ movl 12(%esp), %eax
+ movl 16(%esp), %ebx
+ jz .L005encrypt_finish
+.L006encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+ movl %eax, 12(%esp)
+ movl %ebx, 16(%esp)
+ call des_encrypt1
+ movl 12(%esp), %eax
+ movl 16(%esp), %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L006encrypt_loop
+.L005encrypt_finish:
+ movl 56(%esp), %ebp
+ andl $7, %ebp
+ jz .L007finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L008cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L009ej7:
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L010ej6:
+ movb 5(%esi), %dh
+.L011ej5:
+ movb 4(%esi), %dl
+.L012ej4:
+ movl (%esi), %ecx
+ jmp .L013ejend
+.L014ej3:
+ movb 2(%esi), %ch
+ sall $8, %ecx
+.L015ej2:
+ movb 1(%esi), %ch
+.L016ej1:
+ movb (%esi), %cl
+.L013ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+ movl %eax, 12(%esp)
+ movl %ebx, 16(%esp)
+ call des_encrypt1
+ movl 12(%esp), %eax
+ movl 16(%esp), %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L007finish
+.align 16
+.L004decrypt:
+ andl $4294967288, %ebp
+ movl 20(%esp), %eax
+ movl 24(%esp), %ebx
+ jz .L017decrypt_finish
+.L018decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 12(%esp)
+ movl %ebx, 16(%esp)
+ call des_encrypt1
+ movl 12(%esp), %eax
+ movl 16(%esp), %ebx
+ movl 20(%esp), %ecx
+ movl 24(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 20(%esp)
+ movl %ebx, 24(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L018decrypt_loop
+.L017decrypt_finish:
+ movl 56(%esp), %ebp
+ andl $7, %ebp
+ jz .L007finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 12(%esp)
+ movl %ebx, 16(%esp)
+ call des_encrypt1
+ movl 12(%esp), %eax
+ movl 16(%esp), %ebx
+ movl 20(%esp), %ecx
+ movl 24(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+	movl .L027cbc_dec_jmp_table(,%ebp,4),%ebp
+	jmp *%ebp
+.L019dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L020dj6:
+ movb %dh, 5(%edi)
+.L021dj5:
+ movb %dl, 4(%edi)
+.L022dj4:
+ movl %ecx, (%edi)
+ jmp .L023djend
+.L024dj3:
+	rorl $16, %ecx
+	movb %cl, 2(%edi)
+	shrl $16, %ecx
+.L025dj2:
+	movb %ch, 1(%edi)
+.L026dj1:
+	movb %cl, (%edi)
+.L023djend:
+ jmp .L007finish
+.align 16
+.L007finish:
+ movl 64(%esp), %ecx
+ addl $28, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L008cbc_enc_jmp_table:
+ .long 0
+ .long .L016ej1
+ .long .L015ej2
+ .long .L014ej3
+ .long .L012ej4
+ .long .L011ej5
+ .long .L010ej6
+ .long .L009ej7
+.align 16
+.L027cbc_dec_jmp_table:
+ .long 0
+ .long .L026dj1
+ .long .L025dj2
+ .long .L024dj3
+ .long .L022dj4
+ .long .L021dj5
+ .long .L020dj6
+ .long .L019dj7
+.L_des_ncbc_encrypt_end:
+ .size des_ncbc_encrypt,.L_des_ncbc_encrypt_end-des_ncbc_encrypt
+.ident "desasm.pl"
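
des_ncbc_encrypt is standard CBC over 8-byte blocks: whole blocks are XORed with the running IV and encrypted, the ej1..ej7 jump-table targets gather a 1-7 byte tail into a zero-padded final block on the encrypt side, and the dj1..dj7 targets scatter the decrypted tail bytes on the decrypt side. A hedged behavioral outline of the encrypt path in C (byte order matches the assembly's 32-bit loads only on little-endian machines; block_fn stands in for a des_encrypt1 wrapper):

    #include <stdint.h>
    #include <string.h>

    typedef void (*block_fn)(uint32_t io[2]);   /* encrypts one block in place */

    static void cbc_encrypt_sketch(const uint8_t *in, uint8_t *out, long len,
                                   uint32_t iv[2], block_fn enc)
    {
        uint32_t tmp[2];
        for (; len >= 8; len -= 8, in += 8, out += 8) {
            memcpy(tmp, in, 8);
            tmp[0] ^= iv[0]; tmp[1] ^= iv[1];   /* chain with previous ciphertext */
            enc(tmp);
            memcpy(out, tmp, 8);
            iv[0] = tmp[0]; iv[1] = tmp[1];
        }
        if (len > 0) {                          /* 1-7 byte tail, zero padded */
            uint8_t last[8] = {0};
            memcpy(last, in, (size_t)len);
            memcpy(tmp, last, 8);
            tmp[0] ^= iv[0]; tmp[1] ^= iv[1];
            enc(tmp);
            memcpy(out, tmp, 8);                /* a full final block is emitted */
        }
    }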
+.text
+ .align 16
+.globl des_ede3_cbc_encrypt
+ .type des_ede3_cbc_encrypt,@function
+des_ede3_cbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ # getting iv ptr from parameter 6
+ movl 44(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ # getting encrypt flag from parameter 7
+ movl 64(%esp), %ecx
+ # get and push parameter 5
+ movl 56(%esp), %eax
+ pushl %eax
+ # get and push parameter 4
+ movl 56(%esp), %eax
+ pushl %eax
+ # get and push parameter 3
+ movl 56(%esp), %eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0, %ecx
+ jz .L028decrypt
+ andl $4294967288, %ebp
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ jz .L029encrypt_finish
+.L030encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ call des_encrypt3
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L030encrypt_loop
+.L029encrypt_finish:
+ movl 60(%esp), %ebp
+ andl $7, %ebp
+ jz .L031finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L032cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L033ej7:
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L034ej6:
+ movb 5(%esi), %dh
+.L035ej5:
+ movb 4(%esi), %dl
+.L036ej4:
+ movl (%esi), %ecx
+ jmp .L037ejend
+.L038ej3:
+ movb 2(%esi), %ch
+ sall $8, %ecx
+.L039ej2:
+ movb 1(%esi), %ch
+.L040ej1:
+ movb (%esi), %cl
+.L037ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ call des_encrypt3
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L031finish
+.align 16
+.L028decrypt:
+ andl $4294967288, %ebp
+ movl 24(%esp), %eax
+ movl 28(%esp), %ebx
+ jz .L041decrypt_finish
+.L042decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ call des_decrypt3
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ movl 24(%esp), %ecx
+ movl 28(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 24(%esp)
+ movl %ebx, 28(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L042decrypt_loop
+.L041decrypt_finish:
+ movl 60(%esp), %ebp
+ andl $7, %ebp
+ jz .L031finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ call des_decrypt3
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ movl 24(%esp), %ecx
+ movl 28(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+	movl .L051cbc_dec_jmp_table(,%ebp,4),%ebp
+	jmp *%ebp
+.L043dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L044dj6:
+ movb %dh, 5(%edi)
+.L045dj5:
+ movb %dl, 4(%edi)
+.L046dj4:
+ movl %ecx, (%edi)
+ jmp .L047djend
+.L048dj3:
+	rorl $16, %ecx
+	movb %cl, 2(%edi)
+	shrl $16, %ecx
+.L049dj2:
+	movb %ch, 1(%edi)
+.L050dj1:
+	movb %cl, (%edi)
+.L047djend:
+ jmp .L031finish
+.align 16
+.L031finish:
+ movl 76(%esp), %ecx
+ addl $32, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L032cbc_enc_jmp_table:
+ .long 0
+ .long .L040ej1
+ .long .L039ej2
+ .long .L038ej3
+ .long .L036ej4
+ .long .L035ej5
+ .long .L034ej6
+ .long .L033ej7
+.align 16
+.L051cbc_dec_jmp_table:
+ .long 0
+ .long .L050dj1
+ .long .L049dj2
+ .long .L048dj3
+ .long .L046dj4
+ .long .L045dj5
+ .long .L044dj6
+ .long .L043dj7
+.L_des_ede3_cbc_encrypt_end:
+ .size des_ede3_cbc_encrypt,.L_des_ede3_cbc_encrypt_end-des_ede3_cbc_encrypt
+.ident "desasm.pl"
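
des_ede3_cbc_encrypt is the same CBC skeleton with the block operation swapped for the three-key EDE core above. Wiring it into the earlier sketch (the des_encrypt3 prototype is approximated here; the real libdes interface passes des_key_schedule arguments):

    #include <stdint.h>

    extern void des_encrypt3(uint32_t data[2], const uint32_t *ks1,
                             const uint32_t *ks2, const uint32_t *ks3);

    static const uint32_t *g_ks1, *g_ks2, *g_ks3;  /* illustrative key globals */

    static void ede3_block(uint32_t io[2])
    {
        des_encrypt3(io, g_ks1, g_ks2, g_ks3);
    }

    /* cbc_encrypt_sketch(in, out, len, iv, ede3_block) then mirrors the
       .L030encrypt_loop fast path above. */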
diff --git a/secure/lib/libcrypto/i386/md5-586.s b/secure/lib/libcrypto/i386/md5-586.s
new file mode 100644
index 0000000..5816cc5
--- /dev/null
+++ b/secure/lib/libcrypto/i386/md5-586.s
@@ -0,0 +1,689 @@
+ # $FreeBSD$
+	# Don't even think of reading this code
+	# It was automatically generated by md5-586.pl,
+	# a perl program used to generate the x86 assembler for
+	# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "md5-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl md5_block_asm_host_order
+ .type md5_block_asm_host_order,@function
+md5_block_asm_host_order:
+ pushl %esi
+ pushl %edi
+ movl 12(%esp), %edi
+ movl 16(%esp), %esi
+ movl 20(%esp), %ecx
+ pushl %ebp
+ sall $6, %ecx
+ pushl %ebx
+ addl %esi, %ecx
+ subl $64, %ecx
+ movl (%edi), %eax
+ pushl %ecx
+ movl 4(%edi), %ebx
+ movl 8(%edi), %ecx
+ movl 12(%edi), %edx
+.L000start:
+
+ # R0 section
+ movl %ecx, %edi
+ movl (%esi), %ebp
+ # R0 0
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 3614090360(%eax,%ebp,1),%eax
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ movl 4(%esi), %ebp
+ addl %ebx, %eax
+ # R0 1
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 3905402710(%edx,%ebp,1),%edx
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ movl 8(%esi), %ebp
+ addl %eax, %edx
+ # R0 2
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 606105819(%ecx,%ebp,1),%ecx
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ movl 12(%esi), %ebp
+ addl %edx, %ecx
+ # R0 3
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 3250441966(%ebx,%ebp,1),%ebx
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ movl 16(%esi), %ebp
+ addl %ecx, %ebx
+ # R0 4
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 4118548399(%eax,%ebp,1),%eax
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ movl 20(%esi), %ebp
+ addl %ebx, %eax
+ # R0 5
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 1200080426(%edx,%ebp,1),%edx
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ movl 24(%esi), %ebp
+ addl %eax, %edx
+ # R0 6
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 2821735955(%ecx,%ebp,1),%ecx
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ movl 28(%esi), %ebp
+ addl %edx, %ecx
+ # R0 7
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 4249261313(%ebx,%ebp,1),%ebx
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ movl 32(%esi), %ebp
+ addl %ecx, %ebx
+ # R0 8
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 1770035416(%eax,%ebp,1),%eax
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ movl 36(%esi), %ebp
+ addl %ebx, %eax
+ # R0 9
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 2336552879(%edx,%ebp,1),%edx
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ movl 40(%esi), %ebp
+ addl %eax, %edx
+ # R0 10
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 4294925233(%ecx,%ebp,1),%ecx
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ movl 44(%esi), %ebp
+ addl %edx, %ecx
+ # R0 11
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 2304563134(%ebx,%ebp,1),%ebx
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ movl 48(%esi), %ebp
+ addl %ecx, %ebx
+ # R0 12
+ xorl %edx, %edi
+ andl %ebx, %edi
+ leal 1804603682(%eax,%ebp,1),%eax
+ xorl %edx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $7, %eax
+ movl 52(%esi), %ebp
+ addl %ebx, %eax
+ # R0 13
+ xorl %ecx, %edi
+ andl %eax, %edi
+ leal 4254626195(%edx,%ebp,1),%edx
+ xorl %ecx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $12, %edx
+ movl 56(%esi), %ebp
+ addl %eax, %edx
+ # R0 14
+ xorl %ebx, %edi
+ andl %edx, %edi
+ leal 2792965006(%ecx,%ebp,1),%ecx
+ xorl %ebx, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $17, %ecx
+ movl 60(%esi), %ebp
+ addl %edx, %ecx
+ # R0 15
+ xorl %eax, %edi
+ andl %ecx, %edi
+ leal 1236535329(%ebx,%ebp,1),%ebx
+ xorl %eax, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $22, %ebx
+ movl 4(%esi), %ebp
+ addl %ecx, %ebx
+
+ # R1 section
+ # R1 16
+ leal 4129170786(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 24(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ # R1 17
+ leal 3225465664(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 44(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ # R1 18
+ leal 643717713(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl (%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ # R1 19
+ leal 3921069994(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 20(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+ # R1 20
+ leal 3593408605(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 40(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ # R1 21
+ leal 38016083(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 60(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ # R1 22
+ leal 3634488961(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl 16(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ # R1 23
+ leal 3889429448(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 36(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+ # R1 24
+ leal 568446438(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 56(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ # R1 25
+ leal 3275163606(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 12(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ # R1 26
+ leal 4107603335(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl 32(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ # R1 27
+ leal 1163531501(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 52(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+ # R1 28
+ leal 2850285829(%eax,%ebp,1),%eax
+ xorl %ebx, %edi
+ andl %edx, %edi
+ movl 8(%esi), %ebp
+ xorl %ecx, %edi
+ addl %edi, %eax
+ movl %ebx, %edi
+ roll $5, %eax
+ addl %ebx, %eax
+ # R1 29
+ leal 4243563512(%edx,%ebp,1),%edx
+ xorl %eax, %edi
+ andl %ecx, %edi
+ movl 28(%esi), %ebp
+ xorl %ebx, %edi
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $9, %edx
+ addl %eax, %edx
+ # R1 30
+ leal 1735328473(%ecx,%ebp,1),%ecx
+ xorl %edx, %edi
+ andl %ebx, %edi
+ movl 48(%esi), %ebp
+ xorl %eax, %edi
+ addl %edi, %ecx
+ movl %edx, %edi
+ roll $14, %ecx
+ addl %edx, %ecx
+ # R1 31
+ leal 2368359562(%ebx,%ebp,1),%ebx
+ xorl %ecx, %edi
+ andl %eax, %edi
+ movl 20(%esi), %ebp
+ xorl %edx, %edi
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $20, %ebx
+ addl %ecx, %ebx
+
+ # R2 section
+ # R2 32
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 4294588738(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ roll $4, %eax
+ movl 32(%esi), %ebp
+ movl %ebx, %edi
+ # R2 33
+ leal 2272392833(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 44(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ # R2 34
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 1839030562(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ roll $16, %ecx
+ movl 56(%esi), %ebp
+ movl %edx, %edi
+ # R2 35
+ leal 4259657740(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl 4(%esi), %ebp
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+ # R2 36
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 2763975236(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ roll $4, %eax
+ movl 16(%esi), %ebp
+ movl %ebx, %edi
+ # R2 37
+ leal 1272893353(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 28(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ # R2 38
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 4139469664(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ roll $16, %ecx
+ movl 40(%esi), %ebp
+ movl %edx, %edi
+ # R2 39
+ leal 3200236656(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl 52(%esi), %ebp
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+ # R2 40
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 681279174(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ roll $4, %eax
+ movl (%esi), %ebp
+ movl %ebx, %edi
+ # R2 41
+ leal 3936430074(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 12(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ # R2 42
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 3572445317(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ roll $16, %ecx
+ movl 24(%esi), %ebp
+ movl %edx, %edi
+ # R2 43
+ leal 76029189(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl 36(%esi), %ebp
+ addl %edi, %ebx
+ movl %ecx, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+ # R2 44
+ xorl %edx, %edi
+ xorl %ebx, %edi
+ leal 3654602809(%eax,%ebp,1),%eax
+ addl %edi, %eax
+ roll $4, %eax
+ movl 48(%esi), %ebp
+ movl %ebx, %edi
+ # R2 45
+ leal 3873151461(%edx,%ebp,1),%edx
+ addl %ebx, %eax
+ xorl %ecx, %edi
+ xorl %eax, %edi
+ movl 60(%esi), %ebp
+ addl %edi, %edx
+ movl %eax, %edi
+ roll $11, %edx
+ addl %eax, %edx
+ # R2 46
+ xorl %ebx, %edi
+ xorl %edx, %edi
+ leal 530742520(%ecx,%ebp,1),%ecx
+ addl %edi, %ecx
+ roll $16, %ecx
+ movl 8(%esi), %ebp
+ movl %edx, %edi
+ # R2 47
+ leal 3299628645(%ebx,%ebp,1),%ebx
+ addl %edx, %ecx
+ xorl %eax, %edi
+ xorl %ecx, %edi
+ movl (%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $23, %ebx
+ addl %ecx, %ebx
+
+ # R3 section
+ # R3 48
+ xorl %edx, %edi
+ orl %ebx, %edi
+ leal 4096336452(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 28(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ # R3 49
+ orl %eax, %edi
+ leal 1126891415(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 56(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ # R3 50
+ orl %edx, %edi
+ leal 2878612391(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 20(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ # R3 51
+ orl %ecx, %edi
+ leal 4237533241(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 48(%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $21, %ebx
+ xorl %edx, %edi
+ addl %ecx, %ebx
+ # R3 52
+ orl %ebx, %edi
+ leal 1700485571(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 12(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ # R3 53
+ orl %eax, %edi
+ leal 2399980690(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 40(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ # R3 54
+ orl %edx, %edi
+ leal 4293915773(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 4(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ # R3 55
+ orl %ecx, %edi
+ leal 2240044497(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 32(%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $21, %ebx
+ xorl %edx, %edi
+ addl %ecx, %ebx
+ # R3 56
+ orl %ebx, %edi
+ leal 1873313359(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 60(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ # R3 57
+ orl %eax, %edi
+ leal 4264355552(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 24(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ # R3 58
+ orl %edx, %edi
+ leal 2734768916(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 52(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ # R3 59
+ orl %ecx, %edi
+ leal 1309151649(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 16(%esi), %ebp
+ addl %edi, %ebx
+ movl $-1, %edi
+ roll $21, %ebx
+ xorl %edx, %edi
+ addl %ecx, %ebx
+ # R3 60
+ orl %ebx, %edi
+ leal 4149444226(%eax,%ebp,1),%eax
+ xorl %ecx, %edi
+ movl 44(%esi), %ebp
+ addl %edi, %eax
+ movl $-1, %edi
+ roll $6, %eax
+ xorl %ecx, %edi
+ addl %ebx, %eax
+ # R3 61
+ orl %eax, %edi
+ leal 3174756917(%edx,%ebp,1),%edx
+ xorl %ebx, %edi
+ movl 8(%esi), %ebp
+ addl %edi, %edx
+ movl $-1, %edi
+ roll $10, %edx
+ xorl %ebx, %edi
+ addl %eax, %edx
+ # R3 62
+ orl %edx, %edi
+ leal 718787259(%ecx,%ebp,1),%ecx
+ xorl %eax, %edi
+ movl 36(%esi), %ebp
+ addl %edi, %ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ xorl %eax, %edi
+ addl %edx, %ecx
+ # R3 63
+ orl %ecx, %edi
+ leal 3951481745(%ebx,%ebp,1),%ebx
+ xorl %edx, %edi
+ movl 24(%esp), %ebp
+ addl %edi, %ebx
+ addl $64, %esi
+ roll $21, %ebx
+ movl (%ebp), %edi
+ addl %ecx, %ebx
+ addl %edi, %eax
+ movl 4(%ebp), %edi
+ addl %edi, %ebx
+ movl 8(%ebp), %edi
+ addl %edi, %ecx
+ movl 12(%ebp), %edi
+ addl %edi, %edx
+ movl %eax, (%ebp)
+ movl %ebx, 4(%ebp)
+ movl (%esp), %edi
+ movl %ecx, 8(%ebp)
+ movl %edx, 12(%ebp)
+ cmpl %esi, %edi
+ jge .L000start
+ popl %eax
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.L_md5_block_asm_host_order_end:
+ .size md5_block_asm_host_order,.L_md5_block_asm_host_order_end-md5_block_asm_host_order
+.ident "desasm.pl"
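
The four sections unrolled above are the four MD5 round types; the leal instructions fold the message word and the additive constant into one instruction (e.g. 3614090360 is T[1] = 0xd76aa478 from RFC 1321). A hedged C form of one step, with the four boolean functions the R0-R3 comments refer to:

    #include <stdint.h>

    /* Rotate left; valid for the 1..31 shift counts MD5 uses. */
    #define ROTL(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

    #define F(b, c, d) (((b) & (c)) | (~(b) & (d)))  /* R0 section */
    #define G(b, c, d) (((b) & (d)) | ((c) & ~(d)))  /* R1 section */
    #define H(b, c, d) ((b) ^ (c) ^ (d))             /* R2 section */
    #define I(b, c, d) ((c) ^ ((b) | ~(d)))          /* R3 section */

    /* One R0 step: a = b + ROTL(a + F(b,c,d) + X[k] + T[i], s); the other
       rounds substitute G, H, or I for F. */
    static uint32_t md5_step_r0(uint32_t a, uint32_t b, uint32_t c, uint32_t d,
                                uint32_t xk, uint32_t ti, int s)
    {
        return b + ROTL(a + F(b, c, d) + xk + ti, s);
    }

Note that the assembly computes F as ((c ^ d) & b) ^ d, a two-operation equivalent that lets the %edi scratch value be reused across consecutive steps.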
diff --git a/secure/lib/libcrypto/i386/rc4-586.s b/secure/lib/libcrypto/i386/rc4-586.s
new file mode 100644
index 0000000..996718c
--- /dev/null
+++ b/secure/lib/libcrypto/i386/rc4-586.s
@@ -0,0 +1,316 @@
+ # $FreeBSD$
+	# Don't even think of reading this code
+	# It was automatically generated by rc4-586.pl,
+	# a perl program used to generate the x86 assembler for
+	# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "rc4-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl RC4
+ .type RC4,@function
+RC4:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebp
+ movl 16(%esp), %ebx
+ pushl %esi
+ pushl %edi
+ movl (%ebp), %ecx
+ movl 4(%ebp), %edx
+ movl 28(%esp), %esi
+ incl %ecx
+ subl $12, %esp
+ addl $8, %ebp
+ andl $255, %ecx
+ leal -8(%ebx,%esi), %ebx
+ movl 44(%esp), %edi
+ movl %ebx, 8(%esp)
+ movl (%ebp,%ecx,4), %eax
+ cmpl %esi, %ebx
+ jl .L000end
+.L001start:
+ addl $8, %esi
+ # Round 0
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, (%esp)
+ # Round 1
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, 1(%esp)
+ # Round 2
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, 2(%esp)
+ # Round 3
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, 3(%esp)
+ # Round 4
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, 4(%esp)
+ # Round 5
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, 5(%esp)
+ # Round 6
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb %bl, 6(%esp)
+ # Round 7
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ addl $8, %edi
+ movb %bl, 7(%esp)
+ # apply the cipher text
+ movl (%esp), %eax
+ movl -8(%esi), %ebx
+ xorl %ebx, %eax
+ movl -4(%esi), %ebx
+ movl %eax, -8(%edi)
+ movl 4(%esp), %eax
+ xorl %ebx, %eax
+ movl 8(%esp), %ebx
+ movl %eax, -4(%edi)
+ movl (%ebp,%ecx,4), %eax
+ cmpl %ebx, %esi
+ jle .L001start
+.L000end:
+ # Round 0
+ addl $8, %ebx
+ incl %esi
+ cmpl %esi, %ebx
+ jl .L002finished
+ movl %ebx, 8(%esp)
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, (%edi)
+ # Round 1
+ movl 8(%esp), %ebx
+ cmpl %esi, %ebx
+ jle .L002finished
+ incl %esi
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, 1(%edi)
+ # Round 2
+ movl 8(%esp), %ebx
+ cmpl %esi, %ebx
+ jle .L002finished
+ incl %esi
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, 2(%edi)
+ # Round 3
+ movl 8(%esp), %ebx
+ cmpl %esi, %ebx
+ jle .L002finished
+ incl %esi
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, 3(%edi)
+ # Round 4
+ movl 8(%esp), %ebx
+ cmpl %esi, %ebx
+ jle .L002finished
+ incl %esi
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, 4(%edi)
+ # Round 5
+ movl 8(%esp), %ebx
+ cmpl %esi, %ebx
+ jle .L002finished
+ incl %esi
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movl (%ebp,%ecx,4), %eax
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, 5(%edi)
+ # Round 6
+ movl 8(%esp), %ebx
+ cmpl %esi, %ebx
+ jle .L002finished
+ incl %esi
+ addl %eax, %edx
+ andl $255, %edx
+ incl %ecx
+ movl (%ebp,%edx,4), %ebx
+ movl %ebx, -4(%ebp,%ecx,4)
+ addl %eax, %ebx
+ andl $255, %ecx
+ andl $255, %ebx
+ movl %eax, (%ebp,%edx,4)
+ nop
+ movl (%ebp,%ebx,4), %ebx
+ movb -1(%esi), %bh
+ xorb %bh, %bl
+ movb %bl, 6(%edi)
+.L002finished:
+ decl %ecx
+ addl $12, %esp
+ movl %edx, -4(%ebp)
+ movb %cl, -8(%ebp)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.RC4_end:
+ .size RC4,.RC4_end-RC4
+.ident "RC4"
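
RC4 above keeps the 256-entry state as 32-bit words (hence the 4-byte stride in every (%ebp,%reg,4) access), which avoids partial-register stalls, and it produces 8 output bytes per trip through .L001start. Stripped of the unrolling, the loop is the classic PRGA; a hedged scalar equivalent, with the key layout assumed from the loads at the top of RC4:

    #include <stdint.h>
    #include <stddef.h>

    typedef struct { uint32_t x, y, data[256]; } rc4_key_sketch;  /* assumed layout */

    static void rc4_sketch(rc4_key_sketch *key, size_t len,
                           const uint8_t *in, uint8_t *out)
    {
        uint32_t x = key->x, y = key->y, tx, ty;
        while (len--) {
            x = (x + 1) & 0xff;
            tx = key->data[x];
            y = (y + tx) & 0xff;
            ty = key->data[y];
            key->data[x] = ty;      /* swap S[x] and S[y] */
            key->data[y] = tx;
            *out++ = *in++ ^ (uint8_t)key->data[(tx + ty) & 0xff];
        }
        key->x = x;                 /* the asm stores these back at .L002finished */
        key->y = y;
    }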
diff --git a/secure/lib/libcrypto/i386/rc5-586.s b/secure/lib/libcrypto/i386/rc5-586.s
new file mode 100644
index 0000000..1a4c9d3
--- /dev/null
+++ b/secure/lib/libcrypto/i386/rc5-586.s
@@ -0,0 +1,584 @@
+ # $FreeBSD$
+	# Don't even think of reading this code
+	# It was automatically generated by rc5-586.pl,
+	# a perl program used to generate the x86 assembler for
+	# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "rc5-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl RC5_32_encrypt
+ .type RC5_32_encrypt,@function
+RC5_32_encrypt:
+
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+ movl 16(%esp), %edx
+ movl 20(%esp), %ebp
+ # Load the 2 words
+ movl (%edx), %edi
+ movl 4(%edx), %esi
+ pushl %ebx
+ movl (%ebp), %ebx
+ addl 4(%ebp), %edi
+ addl 8(%ebp), %esi
+ xorl %esi, %edi
+ movl 12(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 16(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 20(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 24(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 28(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 32(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 36(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 40(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 44(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 48(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 52(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 56(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 60(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 64(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 68(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 72(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ cmpl $8, %ebx
+ je .L000rc5_exit
+ xorl %esi, %edi
+ movl 76(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 80(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 84(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 88(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 92(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 96(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 100(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 104(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ cmpl $12, %ebx
+ je .L000rc5_exit
+ xorl %esi, %edi
+ movl 108(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 112(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 116(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 120(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 124(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 128(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+ xorl %esi, %edi
+ movl 132(%ebp), %eax
+ movl %esi, %ecx
+ roll %cl, %edi
+ addl %eax, %edi
+ xorl %edi, %esi
+ movl 136(%ebp), %eax
+ movl %edi, %ecx
+ roll %cl, %esi
+ addl %eax, %esi
+.L000rc5_exit:
+ movl %edi, (%edx)
+ movl %esi, 4(%edx)
+ popl %ebx
+ popl %edi
+ popl %esi
+ popl %ebp
+ ret
+.L_RC5_32_encrypt_end:
+ .size RC5_32_encrypt,.L_RC5_32_encrypt_end-RC5_32_encrypt
+.ident "desasm.pl"
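
RC5_32_encrypt is the textbook RC5 loop with two data-dependent rotates per round, fully unrolled; the cmpl $8/$12 early exits select the 8-, 12- or 16-round key schedules ((%ebp) holds the round count, the subkey words start at 4(%ebp)). A hedged C equivalent with S pointing at the subkey words:

    #include <stdint.h>

    /* Rotate left; the &31 matches x86 rol using cl mod 32. */
    #define ROTL32(x, n) (((x) << ((n) & 31)) | ((x) >> ((32 - (n)) & 31)))

    static void rc5_32_encrypt_sketch(uint32_t d[2], const uint32_t *S, int rounds)
    {
        uint32_t a = d[0] + S[0];
        uint32_t b = d[1] + S[1];
        for (int i = 1; i <= rounds; i++) {
            a = ROTL32(a ^ b, b) + S[2 * i];
            b = ROTL32(b ^ a, a) + S[2 * i + 1];
        }
        d[0] = a; d[1] = b;
    }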
+.text
+ .align 16
+.globl RC5_32_decrypt
+ .type RC5_32_decrypt,@function
+RC5_32_decrypt:
+
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+ movl 16(%esp), %edx
+ movl 20(%esp), %ebp
+ # Load the 2 words
+ movl (%edx), %edi
+ movl 4(%edx), %esi
+ pushl %ebx
+ movl (%ebp), %ebx
+ cmpl $12, %ebx
+ je .L001rc5_dec_12
+ cmpl $8, %ebx
+ je .L002rc5_dec_8
+ movl 136(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 132(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 128(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 124(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 120(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 116(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 112(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 108(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+.L001rc5_dec_12:
+ movl 104(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 100(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 96(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 92(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 88(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 84(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 80(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 76(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+.L002rc5_dec_8:
+ movl 72(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 68(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 64(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 60(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 56(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 52(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 48(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 44(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 40(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 36(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 32(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 28(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 24(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 20(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ movl 16(%ebp), %eax
+ subl %eax, %esi
+ movl %edi, %ecx
+ rorl %cl, %esi
+ xorl %edi, %esi
+ movl 12(%ebp), %eax
+ subl %eax, %edi
+ movl %esi, %ecx
+ rorl %cl, %edi
+ xorl %esi, %edi
+ subl 8(%ebp), %esi
+ subl 4(%ebp), %edi
+.L003rc5_exit:
+ movl %edi, (%edx)
+ movl %esi, 4(%edx)
+ popl %ebx
+ popl %edi
+ popl %esi
+ popl %ebp
+ ret
+.L_RC5_32_decrypt_end:
+ .size RC5_32_decrypt,.L_RC5_32_decrypt_end-RC5_32_decrypt
+.ident "desasm.pl"
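
RC5_32_decrypt runs the same schedule backwards, with the rc5_dec_12/rc5_dec_8 entry points skipping the rounds a shorter schedule never applied. The hedged inverse of the sketch above, reusing its conventions:

    #define ROTR32(x, n) (((x) >> ((n) & 31)) | ((x) << ((32 - (n)) & 31)))

    static void rc5_32_decrypt_sketch(uint32_t d[2], const uint32_t *S, int rounds)
    {
        uint32_t a = d[0], b = d[1];
        for (int i = rounds; i >= 1; i--) {
            b = ROTR32(b - S[2 * i + 1], a) ^ a;
            a = ROTR32(a - S[2 * i], b) ^ b;
        }
        d[0] = a - S[0];
        d[1] = b - S[1];
    }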
+.text
+ .align 16
+.globl RC5_32_cbc_encrypt
+ .type RC5_32_cbc_encrypt,@function
+RC5_32_cbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ # getting iv ptr from parameter 4
+ movl 36(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ # getting encrypt flag from parameter 5
+ movl 56(%esp), %ecx
+ # get and push parameter 3
+ movl 48(%esp), %eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0, %ecx
+ jz .L004decrypt
+ andl $4294967288, %ebp
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ jz .L005encrypt_finish
+.L006encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call RC5_32_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L006encrypt_loop
+.L005encrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L007finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L008cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L009ej7:
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L010ej6:
+ movb 5(%esi), %dh
+.L011ej5:
+ movb 4(%esi), %dl
+.L012ej4:
+ movl (%esi), %ecx
+ jmp .L013ejend
+.L014ej3:
+ movb 2(%esi), %ch
+ sall $8, %ecx
+.L015ej2:
+ movb 1(%esi), %ch
+.L016ej1:
+ movb (%esi), %cl
+.L013ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call RC5_32_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L007finish
+.align 16
+.L004decrypt:
+ andl $4294967288, %ebp
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ jz .L017decrypt_finish
+.L018decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call RC5_32_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L018decrypt_loop
+.L017decrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L007finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call RC5_32_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+	movl .L027cbc_dec_jmp_table(,%ebp,4),%ebp
+	jmp *%ebp
+.L019dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L020dj6:
+ movb %dh, 5(%edi)
+.L021dj5:
+ movb %dl, 4(%edi)
+.L022dj4:
+ movl %ecx, (%edi)
+ jmp .L023djend
+.L024dj3:
+	rorl $16, %ecx
+	movb %cl, 2(%edi)
+	shrl $16, %ecx
+.L025dj2:
+	movb %ch, 1(%edi)
+.L026dj1:
+	movb %cl, (%edi)
+.L023djend:
+ jmp .L007finish
+.align 16
+.L007finish:
+ movl 60(%esp), %ecx
+ addl $24, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L008cbc_enc_jmp_table:
+ .long 0
+ .long .L016ej1
+ .long .L015ej2
+ .long .L014ej3
+ .long .L012ej4
+ .long .L011ej5
+ .long .L010ej6
+ .long .L009ej7
+.align 16
+.L027cbc_dec_jmp_table:
+ .long 0
+ .long .L026dj1
+ .long .L025dj2
+ .long .L024dj3
+ .long .L022dj4
+ .long .L021dj5
+ .long .L020dj6
+ .long .L019dj7
+.L_RC5_32_cbc_encrypt_end:
+ .size RC5_32_cbc_encrypt,.L_RC5_32_cbc_encrypt_end-RC5_32_cbc_encrypt
+.ident "desasm.pl"
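
RC5_32_cbc_encrypt reuses the same CBC scaffolding as the DES routines, only with RC5_32_encrypt/RC5_32_decrypt as the block operations. For completeness, a hedged decrypt-side counterpart to the earlier encrypt sketch, matching the .L018decrypt_loop structure (decrypt, XOR with the previous ciphertext, then carry this block's ciphertext forward as the next IV):

    #include <stdint.h>
    #include <string.h>

    typedef void (*block_fn)(uint32_t io[2]);   /* decrypts one block in place */

    static void cbc_decrypt_sketch(const uint8_t *in, uint8_t *out, long len,
                                   uint32_t iv[2], block_fn dec)
    {
        uint32_t tmp[2], cipher[2];
        for (; len >= 8; len -= 8, in += 8, out += 8) {
            memcpy(cipher, in, 8);
            tmp[0] = cipher[0]; tmp[1] = cipher[1];
            dec(tmp);
            tmp[0] ^= iv[0]; tmp[1] ^= iv[1];
            memcpy(out, tmp, 8);
            iv[0] = cipher[0];                  /* ciphertext chains forward */
            iv[1] = cipher[1];
        }
    }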
diff --git a/secure/lib/libcrypto/i386/rmd-586.s b/secure/lib/libcrypto/i386/rmd-586.s
new file mode 100644
index 0000000..96f4928
--- /dev/null
+++ b/secure/lib/libcrypto/i386/rmd-586.s
@@ -0,0 +1,1975 @@
+ # $FreeBSD$
+	# Don't even think of reading this code
+	# It was automatically generated by rmd-586.pl,
+	# a perl program used to generate the x86 assembler for
+	# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "rmd-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl ripemd160_block_asm_host_order
+ .type ripemd160_block_asm_host_order,@function
+ripemd160_block_asm_host_order:
+ movl 4(%esp), %edx
+ movl 8(%esp), %eax
+ pushl %esi
+ movl (%edx), %ecx
+ pushl %edi
+ movl 4(%edx), %esi
+ pushl %ebp
+ movl 8(%edx), %edi
+ pushl %ebx
+ subl $108, %esp
+.L000start:
+
+ movl (%eax), %ebx
+ movl 4(%eax), %ebp
+ movl %ebx, (%esp)
+ movl %ebp, 4(%esp)
+ movl 8(%eax), %ebx
+ movl 12(%eax), %ebp
+ movl %ebx, 8(%esp)
+ movl %ebp, 12(%esp)
+ movl 16(%eax), %ebx
+ movl 20(%eax), %ebp
+ movl %ebx, 16(%esp)
+ movl %ebp, 20(%esp)
+ movl 24(%eax), %ebx
+ movl 28(%eax), %ebp
+ movl %ebx, 24(%esp)
+ movl %ebp, 28(%esp)
+ movl 32(%eax), %ebx
+ movl 36(%eax), %ebp
+ movl %ebx, 32(%esp)
+ movl %ebp, 36(%esp)
+ movl 40(%eax), %ebx
+ movl 44(%eax), %ebp
+ movl %ebx, 40(%esp)
+ movl %ebp, 44(%esp)
+ movl 48(%eax), %ebx
+ movl 52(%eax), %ebp
+ movl %ebx, 48(%esp)
+ movl %ebp, 52(%esp)
+ movl 56(%eax), %ebx
+ movl 60(%eax), %ebp
+ movl %ebx, 56(%esp)
+ movl %ebp, 60(%esp)
+ movl %edi, %eax
+ movl 12(%edx), %ebx
+ movl 16(%edx), %ebp
+ # 0
+ xorl %ebx, %eax
+ movl (%esp), %edx
+ xorl %esi, %eax
+ addl %edx, %ecx
+ roll $10, %edi
+ addl %eax, %ecx
+ movl %esi, %eax
+ roll $11, %ecx
+ addl %ebp, %ecx
+ # 1
+ xorl %edi, %eax
+ movl 4(%esp), %edx
+ xorl %ecx, %eax
+ addl %eax, %ebp
+ movl %ecx, %eax
+ roll $10, %esi
+ addl %edx, %ebp
+ xorl %esi, %eax
+ roll $14, %ebp
+ addl %ebx, %ebp
+ # 2
+ movl 8(%esp), %edx
+ xorl %ebp, %eax
+ addl %edx, %ebx
+ roll $10, %ecx
+ addl %eax, %ebx
+ movl %ebp, %eax
+ roll $15, %ebx
+ addl %edi, %ebx
+ # 3
+ xorl %ecx, %eax
+ movl 12(%esp), %edx
+ xorl %ebx, %eax
+ addl %eax, %edi
+ movl %ebx, %eax
+ roll $10, %ebp
+ addl %edx, %edi
+ xorl %ebp, %eax
+ roll $12, %edi
+ addl %esi, %edi
+ # 4
+ movl 16(%esp), %edx
+ xorl %edi, %eax
+ addl %edx, %esi
+ roll $10, %ebx
+ addl %eax, %esi
+ movl %edi, %eax
+ roll $5, %esi
+ addl %ecx, %esi
+ # 5
+ xorl %ebx, %eax
+ movl 20(%esp), %edx
+ xorl %esi, %eax
+ addl %eax, %ecx
+ movl %esi, %eax
+ roll $10, %edi
+ addl %edx, %ecx
+ xorl %edi, %eax
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 6
+ movl 24(%esp), %edx
+ xorl %ecx, %eax
+ addl %edx, %ebp
+ roll $10, %esi
+ addl %eax, %ebp
+ movl %ecx, %eax
+ roll $7, %ebp
+ addl %ebx, %ebp
+ # 7
+ xorl %esi, %eax
+ movl 28(%esp), %edx
+ xorl %ebp, %eax
+ addl %eax, %ebx
+ movl %ebp, %eax
+ roll $10, %ecx
+ addl %edx, %ebx
+ xorl %ecx, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 8
+ movl 32(%esp), %edx
+ xorl %ebx, %eax
+ addl %edx, %edi
+ roll $10, %ebp
+ addl %eax, %edi
+ movl %ebx, %eax
+ roll $11, %edi
+ addl %esi, %edi
+ # 9
+ xorl %ebp, %eax
+ movl 36(%esp), %edx
+ xorl %edi, %eax
+ addl %eax, %esi
+ movl %edi, %eax
+ roll $10, %ebx
+ addl %edx, %esi
+ xorl %ebx, %eax
+ roll $13, %esi
+ addl %ecx, %esi
+ # 10
+ movl 40(%esp), %edx
+ xorl %esi, %eax
+ addl %edx, %ecx
+ roll $10, %edi
+ addl %eax, %ecx
+ movl %esi, %eax
+ roll $14, %ecx
+ addl %ebp, %ecx
+ # 11
+ xorl %edi, %eax
+ movl 44(%esp), %edx
+ xorl %ecx, %eax
+ addl %eax, %ebp
+ movl %ecx, %eax
+ roll $10, %esi
+ addl %edx, %ebp
+ xorl %esi, %eax
+ roll $15, %ebp
+ addl %ebx, %ebp
+ # 12
+ movl 48(%esp), %edx
+ xorl %ebp, %eax
+ addl %edx, %ebx
+ roll $10, %ecx
+ addl %eax, %ebx
+ movl %ebp, %eax
+ roll $6, %ebx
+ addl %edi, %ebx
+ # 13
+ xorl %ecx, %eax
+ movl 52(%esp), %edx
+ xorl %ebx, %eax
+ addl %eax, %edi
+ movl %ebx, %eax
+ roll $10, %ebp
+ addl %edx, %edi
+ xorl %ebp, %eax
+ roll $7, %edi
+ addl %esi, %edi
+ # 14
+ movl 56(%esp), %edx
+ xorl %edi, %eax
+ addl %edx, %esi
+ roll $10, %ebx
+ addl %eax, %esi
+ movl %edi, %eax
+ roll $9, %esi
+ addl %ecx, %esi
+ # 15
+ xorl %ebx, %eax
+ movl 60(%esp), %edx
+ xorl %esi, %eax
+ addl %eax, %ecx
+ movl $-1, %eax
+ roll $10, %edi
+ addl %edx, %ecx
+ movl 28(%esp), %edx
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 16
+ addl %edx, %ebp
+ movl %esi, %edx
+ subl %ecx, %eax
+ andl %ecx, %edx
+ andl %edi, %eax
+ orl %eax, %edx
+ movl 16(%esp), %eax
+ roll $10, %esi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ movl $-1, %edx
+ roll $7, %ebp
+ addl %ebx, %ebp
+ # 17
+ addl %eax, %ebx
+ movl %ecx, %eax
+ subl %ebp, %edx
+ andl %ebp, %eax
+ andl %esi, %edx
+ orl %edx, %eax
+ movl 52(%esp), %edx
+ roll $10, %ecx
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl $-1, %eax
+ roll $6, %ebx
+ addl %edi, %ebx
+ # 18
+ addl %edx, %edi
+ movl %ebp, %edx
+ subl %ebx, %eax
+ andl %ebx, %edx
+ andl %ecx, %eax
+ orl %eax, %edx
+ movl 4(%esp), %eax
+ roll $10, %ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl $-1, %edx
+ roll $8, %edi
+ addl %esi, %edi
+ # 19
+ addl %eax, %esi
+ movl %ebx, %eax
+ subl %edi, %edx
+ andl %edi, %eax
+ andl %ebp, %edx
+ orl %edx, %eax
+ movl 40(%esp), %edx
+ roll $10, %ebx
+ leal 1518500249(%esi,%eax,1),%esi
+ movl $-1, %eax
+ roll $13, %esi
+ addl %ecx, %esi
+ # 20
+ addl %edx, %ecx
+ movl %edi, %edx
+ subl %esi, %eax
+ andl %esi, %edx
+ andl %ebx, %eax
+ orl %eax, %edx
+ movl 24(%esp), %eax
+ roll $10, %edi
+ leal 1518500249(%ecx,%edx,1),%ecx
+ movl $-1, %edx
+ roll $11, %ecx
+ addl %ebp, %ecx
+ # 21
+ addl %eax, %ebp
+ movl %esi, %eax
+ subl %ecx, %edx
+ andl %ecx, %eax
+ andl %edi, %edx
+ orl %edx, %eax
+ movl 60(%esp), %edx
+ roll $10, %esi
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl $-1, %eax
+ roll $9, %ebp
+ addl %ebx, %ebp
+ # 22
+ addl %edx, %ebx
+ movl %ecx, %edx
+ subl %ebp, %eax
+ andl %ebp, %edx
+ andl %esi, %eax
+ orl %eax, %edx
+ movl 12(%esp), %eax
+ roll $10, %ecx
+ leal 1518500249(%ebx,%edx,1),%ebx
+ movl $-1, %edx
+ roll $7, %ebx
+ addl %edi, %ebx
+ # 23
+ addl %eax, %edi
+ movl %ebp, %eax
+ subl %ebx, %edx
+ andl %ebx, %eax
+ andl %ecx, %edx
+ orl %edx, %eax
+ movl 48(%esp), %edx
+ roll $10, %ebp
+ leal 1518500249(%edi,%eax,1),%edi
+ movl $-1, %eax
+ roll $15, %edi
+ addl %esi, %edi
+ # 24
+ addl %edx, %esi
+ movl %ebx, %edx
+ subl %edi, %eax
+ andl %edi, %edx
+ andl %ebp, %eax
+ orl %eax, %edx
+ movl (%esp), %eax
+ roll $10, %ebx
+ leal 1518500249(%esi,%edx,1),%esi
+ movl $-1, %edx
+ roll $7, %esi
+ addl %ecx, %esi
+ # 25
+ addl %eax, %ecx
+ movl %edi, %eax
+ subl %esi, %edx
+ andl %esi, %eax
+ andl %ebx, %edx
+ orl %edx, %eax
+ movl 36(%esp), %edx
+ roll $10, %edi
+ leal 1518500249(%ecx,%eax,1),%ecx
+ movl $-1, %eax
+ roll $12, %ecx
+ addl %ebp, %ecx
+ # 26
+ addl %edx, %ebp
+ movl %esi, %edx
+ subl %ecx, %eax
+ andl %ecx, %edx
+ andl %edi, %eax
+ orl %eax, %edx
+ movl 20(%esp), %eax
+ roll $10, %esi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ movl $-1, %edx
+ roll $15, %ebp
+ addl %ebx, %ebp
+ # 27
+ addl %eax, %ebx
+ movl %ecx, %eax
+ subl %ebp, %edx
+ andl %ebp, %eax
+ andl %esi, %edx
+ orl %edx, %eax
+ movl 8(%esp), %edx
+ roll $10, %ecx
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl $-1, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 28
+ addl %edx, %edi
+ movl %ebp, %edx
+ subl %ebx, %eax
+ andl %ebx, %edx
+ andl %ecx, %eax
+ orl %eax, %edx
+ movl 56(%esp), %eax
+ roll $10, %ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl $-1, %edx
+ roll $11, %edi
+ addl %esi, %edi
+ # 29
+ addl %eax, %esi
+ movl %ebx, %eax
+ subl %edi, %edx
+ andl %edi, %eax
+ andl %ebp, %edx
+ orl %edx, %eax
+ movl 44(%esp), %edx
+ roll $10, %ebx
+ leal 1518500249(%esi,%eax,1),%esi
+ movl $-1, %eax
+ roll $7, %esi
+ addl %ecx, %esi
+ # 30
+ addl %edx, %ecx
+ movl %edi, %edx
+ subl %esi, %eax
+ andl %esi, %edx
+ andl %ebx, %eax
+ orl %eax, %edx
+ movl 32(%esp), %eax
+ roll $10, %edi
+ leal 1518500249(%ecx,%edx,1),%ecx
+ movl $-1, %edx
+ roll $13, %ecx
+ addl %ebp, %ecx
+ # 31
+ addl %eax, %ebp
+ movl %esi, %eax
+ subl %ecx, %edx
+ andl %ecx, %eax
+ andl %edi, %edx
+ orl %edx, %eax
+ movl $-1, %edx
+ roll $10, %esi
+ leal 1518500249(%ebp,%eax,1),%ebp
+ subl %ecx, %edx
+ roll $12, %ebp
+ addl %ebx, %ebp
+ # 32
+ movl 12(%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %ebx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 1859775393(%ebx,%edx,1),%ebx
+ subl %ebp, %eax
+ roll $11, %ebx
+ addl %edi, %ebx
+ # 33
+ movl 40(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %edi
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 1859775393(%edi,%eax,1),%edi
+ subl %ebx, %edx
+ roll $13, %edi
+ addl %esi, %edi
+ # 34
+ movl 56(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %esi
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 1859775393(%esi,%edx,1),%esi
+ subl %edi, %eax
+ roll $6, %esi
+ addl %ecx, %esi
+ # 35
+ movl 16(%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ecx
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 1859775393(%ecx,%eax,1),%ecx
+ subl %esi, %edx
+ roll $7, %ecx
+ addl %ebp, %ecx
+ # 36
+ movl 36(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebp
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 1859775393(%ebp,%edx,1),%ebp
+ subl %ecx, %eax
+ roll $14, %ebp
+ addl %ebx, %ebp
+ # 37
+ movl 60(%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %ebx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %ecx
+ leal 1859775393(%ebx,%eax,1),%ebx
+ subl %ebp, %edx
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 38
+ movl 32(%esp), %eax
+ orl %ebx, %edx
+ addl %eax, %edi
+ xorl %ecx, %edx
+ movl $-1, %eax
+ roll $10, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ subl %ebx, %eax
+ roll $13, %edi
+ addl %esi, %edi
+ # 39
+ movl 4(%esp), %edx
+ orl %edi, %eax
+ addl %edx, %esi
+ xorl %ebp, %eax
+ movl $-1, %edx
+ roll $10, %ebx
+ leal 1859775393(%esi,%eax,1),%esi
+ subl %edi, %edx
+ roll $15, %esi
+ addl %ecx, %esi
+ # 40
+ movl 8(%esp), %eax
+ orl %esi, %edx
+ addl %eax, %ecx
+ xorl %ebx, %edx
+ movl $-1, %eax
+ roll $10, %edi
+ leal 1859775393(%ecx,%edx,1),%ecx
+ subl %esi, %eax
+ roll $14, %ecx
+ addl %ebp, %ecx
+ # 41
+ movl 28(%esp), %edx
+ orl %ecx, %eax
+ addl %edx, %ebp
+ xorl %edi, %eax
+ movl $-1, %edx
+ roll $10, %esi
+ leal 1859775393(%ebp,%eax,1),%ebp
+ subl %ecx, %edx
+ roll $8, %ebp
+ addl %ebx, %ebp
+ # 42
+ movl (%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %ebx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 1859775393(%ebx,%edx,1),%ebx
+ subl %ebp, %eax
+ roll $13, %ebx
+ addl %edi, %ebx
+ # 43
+ movl 24(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %edi
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 1859775393(%edi,%eax,1),%edi
+ subl %ebx, %edx
+ roll $6, %edi
+ addl %esi, %edi
+ # 44
+ movl 52(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %esi
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 1859775393(%esi,%edx,1),%esi
+ subl %edi, %eax
+ roll $5, %esi
+ addl %ecx, %esi
+ # 45
+ movl 44(%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ecx
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 1859775393(%ecx,%eax,1),%ecx
+ subl %esi, %edx
+ roll $12, %ecx
+ addl %ebp, %ecx
+ # 46
+ movl 20(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebp
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 1859775393(%ebp,%edx,1),%ebp
+ subl %ecx, %eax
+ roll $7, %ebp
+ addl %ebx, %ebp
+ # 47
+ movl 48(%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %ebx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %ecx
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %ebx
+ addl %edi, %ebx
+ # 48
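+	# (rounds 48..63, left line: f(x,y,z) = (x & z) | (y & ~z),
+	#  K = 0x8F1BBCDC (2400959708))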
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 4(%esp), %eax
+ roll $10, %ebp
+ leal 2400959708(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ movl %ebp, %eax
+ roll $11, %edi
+ addl %esi, %edi
+ # 49
+ subl %ebp, %edx
+ andl %edi, %eax
+ andl %ebx, %edx
+ orl %eax, %edx
+ movl 36(%esp), %eax
+ roll $10, %ebx
+ leal 2400959708(%esi,%edx),%esi
+ movl $-1, %edx
+ addl %eax, %esi
+ movl %ebx, %eax
+ roll $12, %esi
+ addl %ecx, %esi
+ # 50
+ subl %ebx, %edx
+ andl %esi, %eax
+ andl %edi, %edx
+ orl %eax, %edx
+ movl 44(%esp), %eax
+ roll $10, %edi
+ leal 2400959708(%ecx,%edx),%ecx
+ movl $-1, %edx
+ addl %eax, %ecx
+ movl %edi, %eax
+ roll $14, %ecx
+ addl %ebp, %ecx
+ # 51
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 40(%esp), %eax
+ roll $10, %esi
+ leal 2400959708(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ movl %esi, %eax
+ roll $15, %ebp
+ addl %ebx, %ebp
+ # 52
+ subl %esi, %edx
+ andl %ebp, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl (%esp), %eax
+ roll $10, %ecx
+ leal 2400959708(%ebx,%edx),%ebx
+ movl $-1, %edx
+ addl %eax, %ebx
+ movl %ecx, %eax
+ roll $14, %ebx
+ addl %edi, %ebx
+ # 53
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 32(%esp), %eax
+ roll $10, %ebp
+ leal 2400959708(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ movl %ebp, %eax
+ roll $15, %edi
+ addl %esi, %edi
+ # 54
+ subl %ebp, %edx
+ andl %edi, %eax
+ andl %ebx, %edx
+ orl %eax, %edx
+ movl 48(%esp), %eax
+ roll $10, %ebx
+ leal 2400959708(%esi,%edx),%esi
+ movl $-1, %edx
+ addl %eax, %esi
+ movl %ebx, %eax
+ roll $9, %esi
+ addl %ecx, %esi
+ # 55
+ subl %ebx, %edx
+ andl %esi, %eax
+ andl %edi, %edx
+ orl %eax, %edx
+ movl 16(%esp), %eax
+ roll $10, %edi
+ leal 2400959708(%ecx,%edx),%ecx
+ movl $-1, %edx
+ addl %eax, %ecx
+ movl %edi, %eax
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 56
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 52(%esp), %eax
+ roll $10, %esi
+ leal 2400959708(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ movl %esi, %eax
+ roll $9, %ebp
+ addl %ebx, %ebp
+ # 57
+ subl %esi, %edx
+ andl %ebp, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl 12(%esp), %eax
+ roll $10, %ecx
+ leal 2400959708(%ebx,%edx),%ebx
+ movl $-1, %edx
+ addl %eax, %ebx
+ movl %ecx, %eax
+ roll $14, %ebx
+ addl %edi, %ebx
+ # 58
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 28(%esp), %eax
+ roll $10, %ebp
+ leal 2400959708(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ movl %ebp, %eax
+ roll $5, %edi
+ addl %esi, %edi
+ # 59
+ subl %ebp, %edx
+ andl %edi, %eax
+ andl %ebx, %edx
+ orl %eax, %edx
+ movl 60(%esp), %eax
+ roll $10, %ebx
+ leal 2400959708(%esi,%edx),%esi
+ movl $-1, %edx
+ addl %eax, %esi
+ movl %ebx, %eax
+ roll $6, %esi
+ addl %ecx, %esi
+ # 60
+ subl %ebx, %edx
+ andl %esi, %eax
+ andl %edi, %edx
+ orl %eax, %edx
+ movl 56(%esp), %eax
+ roll $10, %edi
+ leal 2400959708(%ecx,%edx),%ecx
+ movl $-1, %edx
+ addl %eax, %ecx
+ movl %edi, %eax
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 61
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 20(%esp), %eax
+ roll $10, %esi
+ leal 2400959708(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ movl %esi, %eax
+ roll $6, %ebp
+ addl %ebx, %ebp
+ # 62
+ subl %esi, %edx
+ andl %ebp, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl 24(%esp), %eax
+ roll $10, %ecx
+ leal 2400959708(%ebx,%edx),%ebx
+ movl $-1, %edx
+ addl %eax, %ebx
+ movl %ecx, %eax
+ roll $5, %ebx
+ addl %edi, %ebx
+ # 63
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 8(%esp), %eax
+ roll $10, %ebp
+ leal 2400959708(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ subl %ebp, %edx
+ roll $12, %edi
+ addl %esi, %edi
+ # 64
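+	# (rounds 64..79, left line: f(x,y,z) = x ^ (y | ~z),
+	#  K = 0xA953FD4E (2840853838))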
+ movl 16(%esp), %eax
+ orl %ebx, %edx
+ addl %eax, %esi
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 2840853838(%esi,%edx,1),%esi
+ subl %ebx, %eax
+ roll $9, %esi
+ addl %ecx, %esi
+ # 65
+ movl (%esp), %edx
+ orl %edi, %eax
+ addl %edx, %ecx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 2840853838(%ecx,%eax,1),%ecx
+ subl %edi, %edx
+ roll $15, %ecx
+ addl %ebp, %ecx
+ # 66
+ movl 20(%esp), %eax
+ orl %esi, %edx
+ addl %eax, %ebp
+ xorl %ecx, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 2840853838(%ebp,%edx,1),%ebp
+ subl %esi, %eax
+ roll $5, %ebp
+ addl %ebx, %ebp
+ # 67
+ movl 36(%esp), %edx
+ orl %ecx, %eax
+ addl %edx, %ebx
+ xorl %ebp, %eax
+ movl $-1, %edx
+ roll $10, %ecx
+ leal 2840853838(%ebx,%eax,1),%ebx
+ subl %ecx, %edx
+ roll $11, %ebx
+ addl %edi, %ebx
+ # 68
+ movl 28(%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %edi
+ xorl %ebx, %edx
+ movl $-1, %eax
+ roll $10, %ebp
+ leal 2840853838(%edi,%edx,1),%edi
+ subl %ebp, %eax
+ roll $6, %edi
+ addl %esi, %edi
+ # 69
+ movl 48(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %esi
+ xorl %edi, %eax
+ movl $-1, %edx
+ roll $10, %ebx
+ leal 2840853838(%esi,%eax,1),%esi
+ subl %ebx, %edx
+ roll $8, %esi
+ addl %ecx, %esi
+ # 70
+ movl 8(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %ecx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %edi
+ leal 2840853838(%ecx,%edx,1),%ecx
+ subl %edi, %eax
+ roll $13, %ecx
+ addl %ebp, %ecx
+ # 71
+ movl 40(%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ebp
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %esi
+ leal 2840853838(%ebp,%eax,1),%ebp
+ subl %esi, %edx
+ roll $12, %ebp
+ addl %ebx, %ebp
+ # 72
+ movl 56(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebx
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 2840853838(%ebx,%edx,1),%ebx
+ subl %ecx, %eax
+ roll $5, %ebx
+ addl %edi, %ebx
+ # 73
+ movl 4(%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %edi
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 2840853838(%edi,%eax,1),%edi
+ subl %ebp, %edx
+ roll $12, %edi
+ addl %esi, %edi
+ # 74
+ movl 12(%esp), %eax
+ orl %ebx, %edx
+ addl %eax, %esi
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 2840853838(%esi,%edx,1),%esi
+ subl %ebx, %eax
+ roll $13, %esi
+ addl %ecx, %esi
+ # 75
+ movl 32(%esp), %edx
+ orl %edi, %eax
+ addl %edx, %ecx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 2840853838(%ecx,%eax,1),%ecx
+ subl %edi, %edx
+ roll $14, %ecx
+ addl %ebp, %ecx
+ # 76
+ movl 44(%esp), %eax
+ orl %esi, %edx
+ addl %eax, %ebp
+ xorl %ecx, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 2840853838(%ebp,%edx,1),%ebp
+ subl %esi, %eax
+ roll $11, %ebp
+ addl %ebx, %ebp
+ # 77
+ movl 24(%esp), %edx
+ orl %ecx, %eax
+ addl %edx, %ebx
+ xorl %ebp, %eax
+ movl $-1, %edx
+ roll $10, %ecx
+ leal 2840853838(%ebx,%eax,1),%ebx
+ subl %ecx, %edx
+ roll $8, %ebx
+ addl %edi, %ebx
+ # 78
+ movl 60(%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %edi
+ xorl %ebx, %edx
+ movl $-1, %eax
+ roll $10, %ebp
+ leal 2840853838(%edi,%edx,1),%edi
+ subl %ebp, %eax
+ roll $5, %edi
+ addl %esi, %edi
+ # 79
+ movl 52(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %esi
+ xorl %edi, %eax
+ movl 128(%esp), %edx
+ roll $10, %ebx
+ leal 2840853838(%esi,%eax,1),%esi
+ movl %ecx, 64(%esp)
+ roll $6, %esi
+ addl %ecx, %esi
+ movl (%edx), %ecx
+ movl %esi, 68(%esp)
+ movl %edi, 72(%esp)
+ movl 4(%edx), %esi
+ movl %ebx, 76(%esp)
+ movl 8(%edx), %edi
+ movl %ebp, 80(%esp)
+ movl 12(%edx), %ebx
+ movl 16(%edx), %ebp
+ # 80
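+	# (the left line is done: the code above spilled its result to
+	#  64..80(%esp) and reloaded the original chaining value from the
+	#  context pointer at 128(%esp); the parallel right line starts
+	#  here with f(x,y,z) = x ^ (y | ~z) and K' = 0x50A28BE6
+	#  (1352829926))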
+ movl $-1, %edx
+ subl %ebx, %edx
+ movl 20(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %ecx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %edi
+ leal 1352829926(%ecx,%edx,1),%ecx
+ subl %edi, %eax
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 81
+ movl 56(%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ebp
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %esi
+ leal 1352829926(%ebp,%eax,1),%ebp
+ subl %esi, %edx
+ roll $9, %ebp
+ addl %ebx, %ebp
+ # 82
+ movl 28(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebx
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 1352829926(%ebx,%edx,1),%ebx
+ subl %ecx, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 83
+ movl (%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %edi
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 1352829926(%edi,%eax,1),%edi
+ subl %ebp, %edx
+ roll $11, %edi
+ addl %esi, %edi
+ # 84
+ movl 36(%esp), %eax
+ orl %ebx, %edx
+ addl %eax, %esi
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 1352829926(%esi,%edx,1),%esi
+ subl %ebx, %eax
+ roll $13, %esi
+ addl %ecx, %esi
+ # 85
+ movl 8(%esp), %edx
+ orl %edi, %eax
+ addl %edx, %ecx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 1352829926(%ecx,%eax,1),%ecx
+ subl %edi, %edx
+ roll $15, %ecx
+ addl %ebp, %ecx
+ # 86
+ movl 44(%esp), %eax
+ orl %esi, %edx
+ addl %eax, %ebp
+ xorl %ecx, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 1352829926(%ebp,%edx,1),%ebp
+ subl %esi, %eax
+ roll $15, %ebp
+ addl %ebx, %ebp
+ # 87
+ movl 16(%esp), %edx
+ orl %ecx, %eax
+ addl %edx, %ebx
+ xorl %ebp, %eax
+ movl $-1, %edx
+ roll $10, %ecx
+ leal 1352829926(%ebx,%eax,1),%ebx
+ subl %ecx, %edx
+ roll $5, %ebx
+ addl %edi, %ebx
+ # 88
+ movl 52(%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %edi
+ xorl %ebx, %edx
+ movl $-1, %eax
+ roll $10, %ebp
+ leal 1352829926(%edi,%edx,1),%edi
+ subl %ebp, %eax
+ roll $7, %edi
+ addl %esi, %edi
+ # 89
+ movl 24(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %esi
+ xorl %edi, %eax
+ movl $-1, %edx
+ roll $10, %ebx
+ leal 1352829926(%esi,%eax,1),%esi
+ subl %ebx, %edx
+ roll $7, %esi
+ addl %ecx, %esi
+ # 90
+ movl 60(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %ecx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %edi
+ leal 1352829926(%ecx,%edx,1),%ecx
+ subl %edi, %eax
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 91
+ movl 32(%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ebp
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %esi
+ leal 1352829926(%ebp,%eax,1),%ebp
+ subl %esi, %edx
+ roll $11, %ebp
+ addl %ebx, %ebp
+ # 92
+ movl 4(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebx
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 1352829926(%ebx,%edx,1),%ebx
+ subl %ecx, %eax
+ roll $14, %ebx
+ addl %edi, %ebx
+ # 93
+ movl 40(%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %edi
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 1352829926(%edi,%eax,1),%edi
+ subl %ebp, %edx
+ roll $14, %edi
+ addl %esi, %edi
+ # 94
+ movl 12(%esp), %eax
+ orl %ebx, %edx
+ addl %eax, %esi
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 1352829926(%esi,%edx,1),%esi
+ subl %ebx, %eax
+ roll $12, %esi
+ addl %ecx, %esi
+ # 95
+ movl 48(%esp), %edx
+ orl %edi, %eax
+ addl %edx, %ecx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 1352829926(%ecx,%eax,1),%ecx
+ movl %edi, %eax
+ roll $6, %ecx
+ addl %ebp, %ecx
+ # 96
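+	# (right line, rounds 16..31: f(x,y,z) = (x & z) | (y & ~z),
+	#  K' = 0x5C4DD124 (1548603684))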
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 24(%esp), %eax
+ roll $10, %esi
+ leal 1548603684(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ movl %esi, %eax
+ roll $9, %ebp
+ addl %ebx, %ebp
+ # 97
+ subl %esi, %edx
+ andl %ebp, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl 44(%esp), %eax
+ roll $10, %ecx
+ leal 1548603684(%ebx,%edx),%ebx
+ movl $-1, %edx
+ addl %eax, %ebx
+ movl %ecx, %eax
+ roll $13, %ebx
+ addl %edi, %ebx
+ # 98
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 12(%esp), %eax
+ roll $10, %ebp
+ leal 1548603684(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ movl %ebp, %eax
+ roll $15, %edi
+ addl %esi, %edi
+ # 99
+ subl %ebp, %edx
+ andl %edi, %eax
+ andl %ebx, %edx
+ orl %eax, %edx
+ movl 28(%esp), %eax
+ roll $10, %ebx
+ leal 1548603684(%esi,%edx),%esi
+ movl $-1, %edx
+ addl %eax, %esi
+ movl %ebx, %eax
+ roll $7, %esi
+ addl %ecx, %esi
+ # 100
+ subl %ebx, %edx
+ andl %esi, %eax
+ andl %edi, %edx
+ orl %eax, %edx
+ movl (%esp), %eax
+ roll $10, %edi
+ leal 1548603684(%ecx,%edx),%ecx
+ movl $-1, %edx
+ addl %eax, %ecx
+ movl %edi, %eax
+ roll $12, %ecx
+ addl %ebp, %ecx
+ # 101
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 52(%esp), %eax
+ roll $10, %esi
+ leal 1548603684(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ movl %esi, %eax
+ roll $8, %ebp
+ addl %ebx, %ebp
+ # 102
+ subl %esi, %edx
+ andl %ebp, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl 20(%esp), %eax
+ roll $10, %ecx
+ leal 1548603684(%ebx,%edx),%ebx
+ movl $-1, %edx
+ addl %eax, %ebx
+ movl %ecx, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 103
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 40(%esp), %eax
+ roll $10, %ebp
+ leal 1548603684(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ movl %ebp, %eax
+ roll $11, %edi
+ addl %esi, %edi
+ # 104
+ subl %ebp, %edx
+ andl %edi, %eax
+ andl %ebx, %edx
+ orl %eax, %edx
+ movl 56(%esp), %eax
+ roll $10, %ebx
+ leal 1548603684(%esi,%edx),%esi
+ movl $-1, %edx
+ addl %eax, %esi
+ movl %ebx, %eax
+ roll $7, %esi
+ addl %ecx, %esi
+ # 105
+ subl %ebx, %edx
+ andl %esi, %eax
+ andl %edi, %edx
+ orl %eax, %edx
+ movl 60(%esp), %eax
+ roll $10, %edi
+ leal 1548603684(%ecx,%edx),%ecx
+ movl $-1, %edx
+ addl %eax, %ecx
+ movl %edi, %eax
+ roll $7, %ecx
+ addl %ebp, %ecx
+ # 106
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 32(%esp), %eax
+ roll $10, %esi
+ leal 1548603684(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ movl %esi, %eax
+ roll $12, %ebp
+ addl %ebx, %ebp
+ # 107
+ subl %esi, %edx
+ andl %ebp, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl 48(%esp), %eax
+ roll $10, %ecx
+ leal 1548603684(%ebx,%edx),%ebx
+ movl $-1, %edx
+ addl %eax, %ebx
+ movl %ecx, %eax
+ roll $7, %ebx
+ addl %edi, %ebx
+ # 108
+ subl %ecx, %edx
+ andl %ebx, %eax
+ andl %ebp, %edx
+ orl %eax, %edx
+ movl 16(%esp), %eax
+ roll $10, %ebp
+ leal 1548603684(%edi,%edx),%edi
+ movl $-1, %edx
+ addl %eax, %edi
+ movl %ebp, %eax
+ roll $6, %edi
+ addl %esi, %edi
+ # 109
+ subl %ebp, %edx
+ andl %edi, %eax
+ andl %ebx, %edx
+ orl %eax, %edx
+ movl 36(%esp), %eax
+ roll $10, %ebx
+ leal 1548603684(%esi,%edx),%esi
+ movl $-1, %edx
+ addl %eax, %esi
+ movl %ebx, %eax
+ roll $15, %esi
+ addl %ecx, %esi
+ # 110
+ subl %ebx, %edx
+ andl %esi, %eax
+ andl %edi, %edx
+ orl %eax, %edx
+ movl 4(%esp), %eax
+ roll $10, %edi
+ leal 1548603684(%ecx,%edx),%ecx
+ movl $-1, %edx
+ addl %eax, %ecx
+ movl %edi, %eax
+ roll $13, %ecx
+ addl %ebp, %ecx
+ # 111
+ subl %edi, %edx
+ andl %ecx, %eax
+ andl %esi, %edx
+ orl %eax, %edx
+ movl 8(%esp), %eax
+ roll $10, %esi
+ leal 1548603684(%ebp,%edx),%ebp
+ movl $-1, %edx
+ addl %eax, %ebp
+ subl %ecx, %edx
+ roll $11, %ebp
+ addl %ebx, %ebp
+ # 112
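+	# (right line, rounds 32..47: f(x,y,z) = (x | ~y) ^ z,
+	#  K' = 0x6D703EF3 (1836072691))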
+ movl 60(%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %ebx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 1836072691(%ebx,%edx,1),%ebx
+ subl %ebp, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 113
+ movl 20(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %edi
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 1836072691(%edi,%eax,1),%edi
+ subl %ebx, %edx
+ roll $7, %edi
+ addl %esi, %edi
+ # 114
+ movl 4(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %esi
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 1836072691(%esi,%edx,1),%esi
+ subl %edi, %eax
+ roll $15, %esi
+ addl %ecx, %esi
+ # 115
+ movl 12(%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ecx
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 1836072691(%ecx,%eax,1),%ecx
+ subl %esi, %edx
+ roll $11, %ecx
+ addl %ebp, %ecx
+ # 116
+ movl 28(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebp
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 1836072691(%ebp,%edx,1),%ebp
+ subl %ecx, %eax
+ roll $8, %ebp
+ addl %ebx, %ebp
+ # 117
+ movl 56(%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %ebx
+ xorl %esi, %eax
+ movl $-1, %edx
+ roll $10, %ecx
+ leal 1836072691(%ebx,%eax,1),%ebx
+ subl %ebp, %edx
+ roll $6, %ebx
+ addl %edi, %ebx
+ # 118
+ movl 24(%esp), %eax
+ orl %ebx, %edx
+ addl %eax, %edi
+ xorl %ecx, %edx
+ movl $-1, %eax
+ roll $10, %ebp
+ leal 1836072691(%edi,%edx,1),%edi
+ subl %ebx, %eax
+ roll $6, %edi
+ addl %esi, %edi
+ # 119
+ movl 36(%esp), %edx
+ orl %edi, %eax
+ addl %edx, %esi
+ xorl %ebp, %eax
+ movl $-1, %edx
+ roll $10, %ebx
+ leal 1836072691(%esi,%eax,1),%esi
+ subl %edi, %edx
+ roll $14, %esi
+ addl %ecx, %esi
+ # 120
+ movl 44(%esp), %eax
+ orl %esi, %edx
+ addl %eax, %ecx
+ xorl %ebx, %edx
+ movl $-1, %eax
+ roll $10, %edi
+ leal 1836072691(%ecx,%edx,1),%ecx
+ subl %esi, %eax
+ roll $12, %ecx
+ addl %ebp, %ecx
+ # 121
+ movl 32(%esp), %edx
+ orl %ecx, %eax
+ addl %edx, %ebp
+ xorl %edi, %eax
+ movl $-1, %edx
+ roll $10, %esi
+ leal 1836072691(%ebp,%eax,1),%ebp
+ subl %ecx, %edx
+ roll $13, %ebp
+ addl %ebx, %ebp
+ # 122
+ movl 48(%esp), %eax
+ orl %ebp, %edx
+ addl %eax, %ebx
+ xorl %esi, %edx
+ movl $-1, %eax
+ roll $10, %ecx
+ leal 1836072691(%ebx,%edx,1),%ebx
+ subl %ebp, %eax
+ roll $5, %ebx
+ addl %edi, %ebx
+ # 123
+ movl 8(%esp), %edx
+ orl %ebx, %eax
+ addl %edx, %edi
+ xorl %ecx, %eax
+ movl $-1, %edx
+ roll $10, %ebp
+ leal 1836072691(%edi,%eax,1),%edi
+ subl %ebx, %edx
+ roll $14, %edi
+ addl %esi, %edi
+ # 124
+ movl 40(%esp), %eax
+ orl %edi, %edx
+ addl %eax, %esi
+ xorl %ebp, %edx
+ movl $-1, %eax
+ roll $10, %ebx
+ leal 1836072691(%esi,%edx,1),%esi
+ subl %edi, %eax
+ roll $13, %esi
+ addl %ecx, %esi
+ # 125
+ movl (%esp), %edx
+ orl %esi, %eax
+ addl %edx, %ecx
+ xorl %ebx, %eax
+ movl $-1, %edx
+ roll $10, %edi
+ leal 1836072691(%ecx,%eax,1),%ecx
+ subl %esi, %edx
+ roll $13, %ecx
+ addl %ebp, %ecx
+ # 126
+ movl 16(%esp), %eax
+ orl %ecx, %edx
+ addl %eax, %ebp
+ xorl %edi, %edx
+ movl $-1, %eax
+ roll $10, %esi
+ leal 1836072691(%ebp,%edx,1),%ebp
+ subl %ecx, %eax
+ roll $7, %ebp
+ addl %ebx, %ebp
+ # 127
+ movl 52(%esp), %edx
+ orl %ebp, %eax
+ addl %edx, %ebx
+ xorl %esi, %eax
+ movl 32(%esp), %edx
+ roll $10, %ecx
+ leal 1836072691(%ebx,%eax,1),%ebx
+ movl $-1, %eax
+ roll $5, %ebx
+ addl %edi, %ebx
+ # 128
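+	# (right line, rounds 48..63: f(x,y,z) = (x & y) | (~x & z),
+	#  K' = 0x7A6D76E9 (2053994217))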
+ addl %edx, %edi
+ movl %ebp, %edx
+ subl %ebx, %eax
+ andl %ebx, %edx
+ andl %ecx, %eax
+ orl %eax, %edx
+ movl 24(%esp), %eax
+ roll $10, %ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ movl $-1, %edx
+ roll $15, %edi
+ addl %esi, %edi
+ # 129
+ addl %eax, %esi
+ movl %ebx, %eax
+ subl %edi, %edx
+ andl %edi, %eax
+ andl %ebp, %edx
+ orl %edx, %eax
+ movl 16(%esp), %edx
+ roll $10, %ebx
+ leal 2053994217(%esi,%eax,1),%esi
+ movl $-1, %eax
+ roll $5, %esi
+ addl %ecx, %esi
+ # 130
+ addl %edx, %ecx
+ movl %edi, %edx
+ subl %esi, %eax
+ andl %esi, %edx
+ andl %ebx, %eax
+ orl %eax, %edx
+ movl 4(%esp), %eax
+ roll $10, %edi
+ leal 2053994217(%ecx,%edx,1),%ecx
+ movl $-1, %edx
+ roll $8, %ecx
+ addl %ebp, %ecx
+ # 131
+ addl %eax, %ebp
+ movl %esi, %eax
+ subl %ecx, %edx
+ andl %ecx, %eax
+ andl %edi, %edx
+ orl %edx, %eax
+ movl 12(%esp), %edx
+ roll $10, %esi
+ leal 2053994217(%ebp,%eax,1),%ebp
+ movl $-1, %eax
+ roll $11, %ebp
+ addl %ebx, %ebp
+ # 132
+ addl %edx, %ebx
+ movl %ecx, %edx
+ subl %ebp, %eax
+ andl %ebp, %edx
+ andl %esi, %eax
+ orl %eax, %edx
+ movl 44(%esp), %eax
+ roll $10, %ecx
+ leal 2053994217(%ebx,%edx,1),%ebx
+ movl $-1, %edx
+ roll $14, %ebx
+ addl %edi, %ebx
+ # 133
+ addl %eax, %edi
+ movl %ebp, %eax
+ subl %ebx, %edx
+ andl %ebx, %eax
+ andl %ecx, %edx
+ orl %edx, %eax
+ movl 60(%esp), %edx
+ roll $10, %ebp
+ leal 2053994217(%edi,%eax,1),%edi
+ movl $-1, %eax
+ roll $14, %edi
+ addl %esi, %edi
+ # 134
+ addl %edx, %esi
+ movl %ebx, %edx
+ subl %edi, %eax
+ andl %edi, %edx
+ andl %ebp, %eax
+ orl %eax, %edx
+ movl (%esp), %eax
+ roll $10, %ebx
+ leal 2053994217(%esi,%edx,1),%esi
+ movl $-1, %edx
+ roll $6, %esi
+ addl %ecx, %esi
+ # 135
+ addl %eax, %ecx
+ movl %edi, %eax
+ subl %esi, %edx
+ andl %esi, %eax
+ andl %ebx, %edx
+ orl %edx, %eax
+ movl 20(%esp), %edx
+ roll $10, %edi
+ leal 2053994217(%ecx,%eax,1),%ecx
+ movl $-1, %eax
+ roll $14, %ecx
+ addl %ebp, %ecx
+ # 136
+ addl %edx, %ebp
+ movl %esi, %edx
+ subl %ecx, %eax
+ andl %ecx, %edx
+ andl %edi, %eax
+ orl %eax, %edx
+ movl 48(%esp), %eax
+ roll $10, %esi
+ leal 2053994217(%ebp,%edx,1),%ebp
+ movl $-1, %edx
+ roll $6, %ebp
+ addl %ebx, %ebp
+ # 137
+ addl %eax, %ebx
+ movl %ecx, %eax
+ subl %ebp, %edx
+ andl %ebp, %eax
+ andl %esi, %edx
+ orl %edx, %eax
+ movl 8(%esp), %edx
+ roll $10, %ecx
+ leal 2053994217(%ebx,%eax,1),%ebx
+ movl $-1, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 138
+ addl %edx, %edi
+ movl %ebp, %edx
+ subl %ebx, %eax
+ andl %ebx, %edx
+ andl %ecx, %eax
+ orl %eax, %edx
+ movl 52(%esp), %eax
+ roll $10, %ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ movl $-1, %edx
+ roll $12, %edi
+ addl %esi, %edi
+ # 139
+ addl %eax, %esi
+ movl %ebx, %eax
+ subl %edi, %edx
+ andl %edi, %eax
+ andl %ebp, %edx
+ orl %edx, %eax
+ movl 36(%esp), %edx
+ roll $10, %ebx
+ leal 2053994217(%esi,%eax,1),%esi
+ movl $-1, %eax
+ roll $9, %esi
+ addl %ecx, %esi
+ # 140
+ addl %edx, %ecx
+ movl %edi, %edx
+ subl %esi, %eax
+ andl %esi, %edx
+ andl %ebx, %eax
+ orl %eax, %edx
+ movl 28(%esp), %eax
+ roll $10, %edi
+ leal 2053994217(%ecx,%edx,1),%ecx
+ movl $-1, %edx
+ roll $12, %ecx
+ addl %ebp, %ecx
+ # 141
+ addl %eax, %ebp
+ movl %esi, %eax
+ subl %ecx, %edx
+ andl %ecx, %eax
+ andl %edi, %edx
+ orl %edx, %eax
+ movl 40(%esp), %edx
+ roll $10, %esi
+ leal 2053994217(%ebp,%eax,1),%ebp
+ movl $-1, %eax
+ roll $5, %ebp
+ addl %ebx, %ebp
+ # 142
+ addl %edx, %ebx
+ movl %ecx, %edx
+ subl %ebp, %eax
+ andl %ebp, %edx
+ andl %esi, %eax
+ orl %eax, %edx
+ movl 56(%esp), %eax
+ roll $10, %ecx
+ leal 2053994217(%ebx,%edx,1),%ebx
+ movl $-1, %edx
+ roll $15, %ebx
+ addl %edi, %ebx
+ # 143
+ addl %eax, %edi
+ movl %ebp, %eax
+ subl %ebx, %edx
+ andl %ebx, %eax
+ andl %ecx, %edx
+ orl %eax, %edx
+ movl %ebx, %eax
+ roll $10, %ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ xorl %ebp, %eax
+ roll $8, %edi
+ addl %esi, %edi
+ # 144
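+	# (right line, rounds 64..79: f(x,y,z) = x ^ y ^ z; this group
+	#  has no additive constant)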
+ movl 48(%esp), %edx
+ xorl %edi, %eax
+ addl %edx, %esi
+ roll $10, %ebx
+ addl %eax, %esi
+ movl %edi, %eax
+ roll $8, %esi
+ addl %ecx, %esi
+ # 145
+ xorl %ebx, %eax
+ movl 60(%esp), %edx
+ xorl %esi, %eax
+ addl %eax, %ecx
+ movl %esi, %eax
+ roll $10, %edi
+ addl %edx, %ecx
+ xorl %edi, %eax
+ roll $5, %ecx
+ addl %ebp, %ecx
+ # 146
+ movl 40(%esp), %edx
+ xorl %ecx, %eax
+ addl %edx, %ebp
+ roll $10, %esi
+ addl %eax, %ebp
+ movl %ecx, %eax
+ roll $12, %ebp
+ addl %ebx, %ebp
+ # 147
+ xorl %esi, %eax
+ movl 16(%esp), %edx
+ xorl %ebp, %eax
+ addl %eax, %ebx
+ movl %ebp, %eax
+ roll $10, %ecx
+ addl %edx, %ebx
+ xorl %ecx, %eax
+ roll $9, %ebx
+ addl %edi, %ebx
+ # 148
+ movl 4(%esp), %edx
+ xorl %ebx, %eax
+ addl %edx, %edi
+ roll $10, %ebp
+ addl %eax, %edi
+ movl %ebx, %eax
+ roll $12, %edi
+ addl %esi, %edi
+ # 149
+ xorl %ebp, %eax
+ movl 20(%esp), %edx
+ xorl %edi, %eax
+ addl %eax, %esi
+ movl %edi, %eax
+ roll $10, %ebx
+ addl %edx, %esi
+ xorl %ebx, %eax
+ roll $5, %esi
+ addl %ecx, %esi
+ # 150
+ movl 32(%esp), %edx
+ xorl %esi, %eax
+ addl %edx, %ecx
+ roll $10, %edi
+ addl %eax, %ecx
+ movl %esi, %eax
+ roll $14, %ecx
+ addl %ebp, %ecx
+ # 151
+ xorl %edi, %eax
+ movl 28(%esp), %edx
+ xorl %ecx, %eax
+ addl %eax, %ebp
+ movl %ecx, %eax
+ roll $10, %esi
+ addl %edx, %ebp
+ xorl %esi, %eax
+ roll $6, %ebp
+ addl %ebx, %ebp
+ # 152
+ movl 24(%esp), %edx
+ xorl %ebp, %eax
+ addl %edx, %ebx
+ roll $10, %ecx
+ addl %eax, %ebx
+ movl %ebp, %eax
+ roll $8, %ebx
+ addl %edi, %ebx
+ # 153
+ xorl %ecx, %eax
+ movl 8(%esp), %edx
+ xorl %ebx, %eax
+ addl %eax, %edi
+ movl %ebx, %eax
+ roll $10, %ebp
+ addl %edx, %edi
+ xorl %ebp, %eax
+ roll $13, %edi
+ addl %esi, %edi
+ # 154
+ movl 52(%esp), %edx
+ xorl %edi, %eax
+ addl %edx, %esi
+ roll $10, %ebx
+ addl %eax, %esi
+ movl %edi, %eax
+ roll $6, %esi
+ addl %ecx, %esi
+ # 155
+ xorl %ebx, %eax
+ movl 56(%esp), %edx
+ xorl %esi, %eax
+ addl %eax, %ecx
+ movl %esi, %eax
+ roll $10, %edi
+ addl %edx, %ecx
+ xorl %edi, %eax
+ roll $5, %ecx
+ addl %ebp, %ecx
+ # 156
+ movl (%esp), %edx
+ xorl %ecx, %eax
+ addl %edx, %ebp
+ roll $10, %esi
+ addl %eax, %ebp
+ movl %ecx, %eax
+ roll $15, %ebp
+ addl %ebx, %ebp
+ # 157
+ xorl %esi, %eax
+ movl 12(%esp), %edx
+ xorl %ebp, %eax
+ addl %eax, %ebx
+ movl %ebp, %eax
+ roll $10, %ecx
+ addl %edx, %ebx
+ xorl %ecx, %eax
+ roll $13, %ebx
+ addl %edi, %ebx
+ # 158
+ movl 36(%esp), %edx
+ xorl %ebx, %eax
+ addl %edx, %edi
+ roll $10, %ebp
+ addl %eax, %edi
+ movl %ebx, %eax
+ roll $11, %edi
+ addl %esi, %edi
+ # 159
+ xorl %ebp, %eax
+ movl 44(%esp), %edx
+ xorl %edi, %eax
+ addl %eax, %esi
+ roll $10, %ebx
+ addl %edx, %esi
+ movl 128(%esp), %edx
+ roll $11, %esi
+ addl %ecx, %esi
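+	# both lines are finished: fold them into the chaining value.
+	# Each new state word is the old "next" word plus one left-line
+	# result from 64..80(%esp) and one right-line register, then the
+	# block counter at 136(%esp) decides whether to loop.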
+ movl 4(%edx), %eax
+ addl %eax, %ebx
+ movl 72(%esp), %eax
+ addl %eax, %ebx
+ movl 8(%edx), %eax
+ addl %eax, %ebp
+ movl 76(%esp), %eax
+ addl %eax, %ebp
+ movl 12(%edx), %eax
+ addl %eax, %ecx
+ movl 80(%esp), %eax
+ addl %eax, %ecx
+ movl 16(%edx), %eax
+ addl %eax, %esi
+ movl 64(%esp), %eax
+ addl %eax, %esi
+ movl (%edx), %eax
+ addl %eax, %edi
+ movl 68(%esp), %eax
+ addl %eax, %edi
+ movl 136(%esp), %eax
+ movl %ebx, (%edx)
+ movl %ebp, 4(%edx)
+ movl %ecx, 8(%edx)
+ subl $1, %eax
+ movl %esi, 12(%edx)
+ movl %edi, 16(%edx)
+ jle .L001get_out
+ movl %eax, 136(%esp)
+ movl %ecx, %edi
+ movl 132(%esp), %eax
+ movl %ebx, %ecx
+ addl $64, %eax
+ movl %ebp, %esi
+ movl %eax, 132(%esp)
+ jmp .L000start
+.L001get_out:
+ addl $108, %esp
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.L_ripemd160_block_asm_host_order_end:
+ .size ripemd160_block_asm_host_order,.L_ripemd160_block_asm_host_order_end-ripemd160_block_asm_host_order
+.ident "desasm.pl"
diff --git a/secure/lib/libcrypto/i386/sha1-586.s b/secure/lib/libcrypto/i386/sha1-586.s
new file mode 100644
index 0000000..b13c665
--- /dev/null
+++ b/secure/lib/libcrypto/i386/sha1-586.s
@@ -0,0 +1,1960 @@
+ # $FreeBSD$
+	# Don't even think of reading this code
+	# It was automatically generated by sha1-586.pl
+	# Which is a perl program used to generate the x86 assembler for
+ # any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
+ # eric <eay@cryptsoft.com>
+
+ .file "sha1-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 16
+.globl sha1_block_asm_data_order
+ .type sha1_block_asm_data_order,@function
+sha1_block_asm_data_order:
+ movl 12(%esp), %ecx
+ pushl %esi
+ sall $6, %ecx
+ movl 12(%esp), %esi
+ pushl %ebp
+ addl %esi, %ecx
+ pushl %ebx
+ movl 16(%esp), %ebp
+ pushl %edi
+ movl 12(%ebp), %edx
+ subl $108, %esp
+ movl 16(%ebp), %edi
+ movl 8(%ebp), %ebx
+ movl %ecx, 68(%esp)
+	# First we need to set up the X array
+.L000start:
+ # First, load the words onto the stack in network byte order
+ movl (%esi), %eax
+ movl 4(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
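+	# (the .byte 15 / .byte 200|201 pairs above hand-encode
+	#  bswapl %eax / bswapl %ecx (0F C8 / 0F C9), presumably so the
+	#  code also assembles where the bswap mnemonic is unsupported)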
+ movl %eax, (%esp)
+ movl %ecx, 4(%esp)
+ movl 8(%esi), %eax
+ movl 12(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 8(%esp)
+ movl %ecx, 12(%esp)
+ movl 16(%esi), %eax
+ movl 20(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 16(%esp)
+ movl %ecx, 20(%esp)
+ movl 24(%esi), %eax
+ movl 28(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 24(%esp)
+ movl %ecx, 28(%esp)
+ movl 32(%esi), %eax
+ movl 36(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 32(%esp)
+ movl %ecx, 36(%esp)
+ movl 40(%esi), %eax
+ movl 44(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 40(%esp)
+ movl %ecx, 44(%esp)
+ movl 48(%esi), %eax
+ movl 52(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 48(%esp)
+ movl %ecx, 52(%esp)
+ movl 56(%esi), %eax
+ movl 60(%esi), %ecx
+.byte 15
+.byte 200 # bswapl %eax
+.byte 15
+.byte 201 # bswapl %ecx
+ movl %eax, 56(%esp)
+ movl %ecx, 60(%esp)
+ # We now have the X array on the stack
+ # starting at sp-4
+ movl %esi, 132(%esp)
+.L001shortcut:
+
+ # Start processing
+ movl (%ebp), %eax
+ movl 4(%ebp), %ecx
+ # 00_15 0
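+	# (rounds 0..15: F(b,c,d) = (b & c) | (~b & d), computed here as
+	#  d ^ (b & (c ^ d)), with K = 0x5A827999; W[0..15] are the
+	#  byte-swapped words already on the stack)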
+ movl %ebx, %esi
+ movl %eax, %ebp
+ xorl %edx, %esi
+ roll $5, %ebp
+ andl %ecx, %esi
+ addl %edi, %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ movl (%esp), %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %edx, %esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl %ecx, %edi
+ addl %ebp, %esi
+ xorl %ebx, %edi
+ movl %esi, %ebp
+ andl %eax, %edi
+ roll $5, %ebp
+ addl %edx, %ebp
+ movl 4(%esp), %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ xorl %ebx, %edi
+.byte 209
+.byte 200 # rorl $1 %eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %ebp, %edi
+ # 00_15 2
+ movl %eax, %edx
+ movl %edi, %ebp
+ xorl %ecx, %edx
+ roll $5, %ebp
+ andl %esi, %edx
+ addl %ebx, %ebp
+.byte 209
+.byte 206 # rorl $1 %esi
+ movl 8(%esp), %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ xorl %ecx, %edx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ movl %esi, %ebx
+ addl %ebp, %edx
+ xorl %eax, %ebx
+ movl %edx, %ebp
+ andl %edi, %ebx
+ roll $5, %ebp
+ addl %ecx, %ebp
+ movl 12(%esp), %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ xorl %eax, %ebx
+.byte 209
+.byte 207 # rorl $1 %edi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %ebp, %ebx
+ # 00_15 4
+ movl %edi, %ecx
+ movl %ebx, %ebp
+ xorl %esi, %ecx
+ roll $5, %ebp
+ andl %edx, %ecx
+ addl %eax, %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ movl 16(%esp), %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %esi, %ecx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl %edx, %eax
+ addl %ebp, %ecx
+ xorl %edi, %eax
+ movl %ecx, %ebp
+ andl %ebx, %eax
+ roll $5, %ebp
+ addl %esi, %ebp
+ movl 20(%esp), %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ xorl %edi, %eax
+.byte 209
+.byte 203 # rorl $1 %ebx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %ebp, %eax
+ # 00_15 6
+ movl %ebx, %esi
+ movl %eax, %ebp
+ xorl %edx, %esi
+ roll $5, %ebp
+ andl %ecx, %esi
+ addl %edi, %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ movl 24(%esp), %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %edx, %esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl %ecx, %edi
+ addl %ebp, %esi
+ xorl %ebx, %edi
+ movl %esi, %ebp
+ andl %eax, %edi
+ roll $5, %ebp
+ addl %edx, %ebp
+ movl 28(%esp), %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ xorl %ebx, %edi
+.byte 209
+.byte 200 # rorl $1 %eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %ebp, %edi
+ # 00_15 8
+ movl %eax, %edx
+ movl %edi, %ebp
+ xorl %ecx, %edx
+ roll $5, %ebp
+ andl %esi, %edx
+ addl %ebx, %ebp
+.byte 209
+.byte 206 # rorl $1 %esi
+ movl 32(%esp), %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ xorl %ecx, %edx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ movl %esi, %ebx
+ addl %ebp, %edx
+ xorl %eax, %ebx
+ movl %edx, %ebp
+ andl %edi, %ebx
+ roll $5, %ebp
+ addl %ecx, %ebp
+ movl 36(%esp), %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ xorl %eax, %ebx
+.byte 209
+.byte 207 # rorl $1 %edi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %ebp, %ebx
+ # 00_15 10
+ movl %edi, %ecx
+ movl %ebx, %ebp
+ xorl %esi, %ecx
+ roll $5, %ebp
+ andl %edx, %ecx
+ addl %eax, %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ movl 40(%esp), %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %esi, %ecx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl %edx, %eax
+ addl %ebp, %ecx
+ xorl %edi, %eax
+ movl %ecx, %ebp
+ andl %ebx, %eax
+ roll $5, %ebp
+ addl %esi, %ebp
+ movl 44(%esp), %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ xorl %edi, %eax
+.byte 209
+.byte 203 # rorl $1 %ebx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %ebp, %eax
+ # 00_15 12
+ movl %ebx, %esi
+ movl %eax, %ebp
+ xorl %edx, %esi
+ roll $5, %ebp
+ andl %ecx, %esi
+ addl %edi, %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ movl 48(%esp), %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %edx, %esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl %ecx, %edi
+ addl %ebp, %esi
+ xorl %ebx, %edi
+ movl %esi, %ebp
+ andl %eax, %edi
+ roll $5, %ebp
+ addl %edx, %ebp
+ movl 52(%esp), %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ xorl %ebx, %edi
+.byte 209
+.byte 200 # rorl $1 %eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %ebp, %edi
+ # 00_15 14
+ movl %eax, %edx
+ movl %edi, %ebp
+ xorl %ecx, %edx
+ roll $5, %ebp
+ andl %esi, %edx
+ addl %ebx, %ebp
+.byte 209
+.byte 206 # rorl $1 %esi
+ movl 56(%esp), %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ xorl %ecx, %edx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ movl %esi, %ebx
+ addl %ebp, %edx
+ xorl %eax, %ebx
+ movl %edx, %ebp
+ andl %edi, %ebx
+ roll $5, %ebp
+ addl %ecx, %ebp
+ movl 60(%esp), %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ xorl %eax, %ebx
+.byte 209
+.byte 207 # rorl $1 %edi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %ebp, %ebx
+ # 16_19 16
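+	# (rounds 16..19: same F and K, but the schedule is now expanded
+	#  in place: W[t] = rol(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16], 1))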
+ nop
+ movl (%esp), %ebp
+ movl 8(%esp), %ecx
+ xorl %ebp, %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %esi, %ebp
+ movl %ecx, (%esp)
+ andl %edx, %ebp
+ leal 1518500249(%ecx,%eax,1),%ecx
+ xorl %esi, %ebp
+ movl %ebx, %eax
+ addl %ebp, %ecx
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ movl 4(%esp), %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 56(%esp), %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %ebp, %eax
+.byte 209
+.byte 192 # roll $1 %eax
+ movl %edx, %ebp
+ xorl %edi, %ebp
+ movl %eax, 4(%esp)
+ andl %ebx, %ebp
+ leal 1518500249(%eax,%esi,1),%eax
+ xorl %edi, %ebp
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %eax
+ # 16_19 18
+ movl 8(%esp), %ebp
+ movl 16(%esp), %esi
+ xorl %ebp, %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl 60(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %edx, %ebp
+ movl %esi, 8(%esp)
+ andl %ecx, %ebp
+ leal 1518500249(%esi,%edi,1),%esi
+ xorl %edx, %ebp
+ movl %eax, %edi
+ addl %ebp, %esi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ movl 12(%esp), %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl (%esp), %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %ebp, %edi
+.byte 209
+.byte 199 # roll $1 %edi
+ movl %ecx, %ebp
+ xorl %ebx, %ebp
+ movl %edi, 12(%esp)
+ andl %eax, %ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ xorl %ebx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edi
+ # 20_39 20
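+	# (rounds 20..39: F(b,c,d) = b ^ c ^ d, K = 0x6ED9EBA1)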
+ movl 16(%esp), %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 4(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, 16(%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 21
+ movl 20(%esp), %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 20(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 20_39 22
+ movl 24(%esp), %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %edi, %ebp
+ movl %ecx, 24(%esp)
+ xorl %esi, %ebp
+ leal 1859775393(%ecx,%eax,1),%ecx
+ movl %ebx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %ebp, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ # 20_39 23
+ movl 28(%esp), %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 16(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ebx, %ebp
+.byte 209
+.byte 192 # roll $1 %eax
+ xorl %edx, %ebp
+ movl %eax, 28(%esp)
+ xorl %edi, %ebp
+ leal 1859775393(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+ # 20_39 24
+ movl 32(%esp), %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 20(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 32(%esp)
+ xorl %edx, %ebp
+ leal 1859775393(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 25
+ movl 36(%esp), %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 36(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+ # 20_39 26
+ movl 40(%esp), %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 28(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, 40(%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 27
+ movl 44(%esp), %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 44(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 20_39 28
+ movl 48(%esp), %ecx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %edi, %ebp
+ movl %ecx, 48(%esp)
+ xorl %esi, %ebp
+ leal 1859775393(%ecx,%eax,1),%ecx
+ movl %ebx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %ebp, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ # 20_39 29
+ movl 52(%esp), %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 40(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ebx, %ebp
+.byte 209
+.byte 192 # roll $1 %eax
+ xorl %edx, %ebp
+ movl %eax, 52(%esp)
+ xorl %edi, %ebp
+ leal 1859775393(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+ # 20_39 30
+ movl 56(%esp), %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 44(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 56(%esp)
+ xorl %edx, %ebp
+ leal 1859775393(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 31
+ movl 60(%esp), %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 48(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 60(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+ # 20_39 32
+ movl (%esp), %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 32(%esp), %ebp
+ xorl %ebp, %edx
+ movl 52(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, (%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 33
+ movl 4(%esp), %ebx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 4(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 20_39 34
+ movl 8(%esp), %ecx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 40(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %edi, %ebp
+ movl %ecx, 8(%esp)
+ xorl %esi, %ebp
+ leal 1859775393(%ecx,%eax,1),%ecx
+ movl %ebx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %ebp, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ # 20_39 35
+ movl 12(%esp), %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 44(%esp), %ebp
+ xorl %ebp, %eax
+ movl (%esp), %ebp
+ xorl %ebp, %eax
+ movl %ebx, %ebp
+.byte 209
+.byte 192 # roll $1 %eax
+ xorl %edx, %ebp
+ movl %eax, 12(%esp)
+ xorl %edi, %ebp
+ leal 1859775393(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+ # 20_39 36
+ movl 16(%esp), %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 48(%esp), %ebp
+ xorl %ebp, %esi
+ movl 4(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 16(%esp)
+ xorl %edx, %ebp
+ leal 1859775393(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 37
+ movl 20(%esp), %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 52(%esp), %ebp
+ xorl %ebp, %edi
+ movl 8(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 20(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+ # 20_39 38
+ movl 24(%esp), %edx
+ movl 32(%esp), %ebp
+ xorl %ebp, %edx
+ movl 56(%esp), %ebp
+ xorl %ebp, %edx
+ movl 12(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, 24(%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 39
+ movl 28(%esp), %ebx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 28(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 40_59 40
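+	# (rounds 40..59: F(b,c,d) = (b & c) | (b & d) | (c & d), the
+	#  majority function, computed as ((b | c) & d) | (b & c),
+	#  with K = 0x8F1BBCDC)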
+ movl 32(%esp), %ecx
+ movl 40(%esp), %ebp
+ xorl %ebp, %ecx
+ movl (%esp), %ebp
+ xorl %ebp, %ecx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ orl %edi, %ebp
+ movl %ecx, 32(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ecx,%eax,1),%ecx
+ movl %edx, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ebx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 36(%esp), %eax
+ addl %ebp, %ecx
+ movl 44(%esp), %ebp
+ xorl %ebp, %eax
+ movl 4(%esp), %ebp
+ xorl %ebp, %eax
+ movl 24(%esp), %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %ebp, %eax
+.byte 209
+.byte 192 # roll $1 %eax
+ movl %ebx, %ebp
+ movl %eax, 36(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %ebp
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %eax
+ # 40_59 41
+ # 40_59 42
+ movl 40(%esp), %esi
+ movl 48(%esp), %ebp
+ xorl %ebp, %esi
+ movl 8(%esp), %ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ orl %ebx, %ebp
+ movl %esi, 40(%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ecx, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ andl %ebx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 44(%esp), %edi
+ addl %ebp, %esi
+ movl 52(%esp), %ebp
+ xorl %ebp, %edi
+ movl 12(%esp), %ebp
+ xorl %ebp, %edi
+ movl 32(%esp), %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %ebp, %edi
+.byte 209
+.byte 199 # roll $1 %edi
+ movl %eax, %ebp
+ movl %edi, 44(%esp)
+ orl %ecx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ebx, %ebp
+ andl %ecx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %ebp
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edi
+ # 40_59 43
+ # 40_59 44
+ movl 48(%esp), %edx
+ movl 56(%esp), %ebp
+ xorl %ebp, %edx
+ movl 16(%esp), %ebp
+ xorl %ebp, %edx
+ movl 36(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ orl %eax, %ebp
+ movl %edx, 48(%esp)
+ andl %ecx, %ebp
+ leal 2400959708(%edx,%ebx,1),%edx
+ movl %esi, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ andl %eax, %ebx
+ orl %ebx, %ebp
+ movl %edi, %ebx
+ roll $5, %ebx
+ addl %ebx, %ebp
+ movl 52(%esp), %ebx
+ addl %ebp, %edx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 40(%esp), %ebp
+.byte 209
+.byte 206 # rorl $1 %esi
+ xorl %ebp, %ebx
+.byte 209
+.byte 195 # roll $1 %ebx
+ movl %edi, %ebp
+ movl %ebx, 52(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ebx,%ecx,1),%ebx
+ movl %edi, %ecx
+ andl %eax, %ebp
+ andl %esi, %ecx
+ orl %ecx, %ebp
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebp
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ebx
+ # 40_59 45
+ # 40_59 46
+ movl 56(%esp), %ecx
+ movl (%esp), %ebp
+ xorl %ebp, %ecx
+ movl 24(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ orl %edi, %ebp
+ movl %ecx, 56(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ecx,%eax,1),%ecx
+ movl %edx, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ebx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 60(%esp), %eax
+ addl %ebp, %ecx
+ movl 4(%esp), %ebp
+ xorl %ebp, %eax
+ movl 28(%esp), %ebp
+ xorl %ebp, %eax
+ movl 48(%esp), %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %ebp, %eax
+.byte 209
+.byte 192 # roll $1 %eax
+ movl %ebx, %ebp
+ movl %eax, 60(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %ebp
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %eax
+ # 40_59 47
+ # 40_59 48
+ movl (%esp), %esi
+ movl 8(%esp), %ebp
+ xorl %ebp, %esi
+ movl 32(%esp), %ebp
+ xorl %ebp, %esi
+ movl 52(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ orl %ebx, %ebp
+ movl %esi, (%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ecx, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ andl %ebx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 4(%esp), %edi
+ addl %ebp, %esi
+ movl 12(%esp), %ebp
+ xorl %ebp, %edi
+ movl 36(%esp), %ebp
+ xorl %ebp, %edi
+ movl 56(%esp), %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %ebp, %edi
+.byte 209
+.byte 199 # roll $1 %edi
+ movl %eax, %ebp
+ movl %edi, 4(%esp)
+ orl %ecx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ebx, %ebp
+ andl %ecx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %ebp
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edi
+ # 40_59 49
+ # 40_59 50
+ movl 8(%esp), %edx
+ movl 16(%esp), %ebp
+ xorl %ebp, %edx
+ movl 40(%esp), %ebp
+ xorl %ebp, %edx
+ movl 60(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ orl %eax, %ebp
+ movl %edx, 8(%esp)
+ andl %ecx, %ebp
+ leal 2400959708(%edx,%ebx,1),%edx
+ movl %esi, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ andl %eax, %ebx
+ orl %ebx, %ebp
+ movl %edi, %ebx
+ roll $5, %ebx
+ addl %ebx, %ebp
+ movl 12(%esp), %ebx
+ addl %ebp, %edx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ebx
+ movl (%esp), %ebp
+.byte 209
+.byte 206 # rorl $1 %esi
+ xorl %ebp, %ebx
+.byte 209
+.byte 195 # roll $1 %ebx
+ movl %edi, %ebp
+ movl %ebx, 12(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ebx,%ecx,1),%ebx
+ movl %edi, %ecx
+ andl %eax, %ebp
+ andl %esi, %ecx
+ orl %ecx, %ebp
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebp
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ebx
+ # 40_59 51
+ # 40_59 52
+ movl 16(%esp), %ecx
+ movl 24(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ orl %edi, %ebp
+ movl %ecx, 16(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ecx,%eax,1),%ecx
+ movl %edx, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ebx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ addl %ebp, %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %eax
+ movl 52(%esp), %ebp
+ xorl %ebp, %eax
+ movl 8(%esp), %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %ebp, %eax
+.byte 209
+.byte 192 # roll $1 %eax
+ movl %ebx, %ebp
+ movl %eax, 20(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %ebp
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %eax
+ # 40_59 53
+ # 40_59 54
+ movl 24(%esp), %esi
+ movl 32(%esp), %ebp
+ xorl %ebp, %esi
+ movl 56(%esp), %ebp
+ xorl %ebp, %esi
+ movl 12(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ orl %ebx, %ebp
+ movl %esi, 24(%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ecx, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ andl %ebx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 28(%esp), %edi
+ addl %ebp, %esi
+ movl 36(%esp), %ebp
+ xorl %ebp, %edi
+ movl 60(%esp), %ebp
+ xorl %ebp, %edi
+ movl 16(%esp), %ebp
+.byte 209
+.byte 201 # rorl $1 %ecx
+ xorl %ebp, %edi
+.byte 209
+.byte 199 # roll $1 %edi
+ movl %eax, %ebp
+ movl %edi, 28(%esp)
+ orl %ecx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ebx, %ebp
+ andl %ecx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %ebp
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edi
+ # 40_59 55
+ # 40_59 56
+ movl 32(%esp), %edx
+ movl 40(%esp), %ebp
+ xorl %ebp, %edx
+ movl (%esp), %ebp
+ xorl %ebp, %edx
+ movl 20(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ orl %eax, %ebp
+ movl %edx, 32(%esp)
+ andl %ecx, %ebp
+ leal 2400959708(%edx,%ebx,1),%edx
+ movl %esi, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ andl %eax, %ebx
+ orl %ebx, %ebp
+ movl %edi, %ebx
+ roll $5, %ebx
+ addl %ebx, %ebp
+ movl 36(%esp), %ebx
+ addl %ebp, %edx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 24(%esp), %ebp
+.byte 209
+.byte 206 # rorl $1 %esi
+ xorl %ebp, %ebx
+.byte 209
+.byte 195 # roll $1 %ebx
+ movl %edi, %ebp
+ movl %ebx, 36(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ebx,%ecx,1),%ebx
+ movl %edi, %ecx
+ andl %eax, %ebp
+ andl %esi, %ecx
+ orl %ecx, %ebp
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebp
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ebx
+ # 40_59 57
+ # 40_59 58
+ movl 40(%esp), %ecx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ orl %edi, %ebp
+ movl %ecx, 40(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ecx,%eax,1),%ecx
+ movl %edx, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ebx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 44(%esp), %eax
+ addl %ebp, %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 32(%esp), %ebp
+.byte 209
+.byte 202 # rorl $1 %edx
+ xorl %ebp, %eax
+.byte 209
+.byte 192 # roll $1 %eax
+ movl %ebx, %ebp
+ movl %eax, 44(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %ebp
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %eax
+ # 40_59 59
+ # 20_39 60
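+	# (rounds 60..79: F(b,c,d) = b ^ c ^ d again, now with
+	#  K = 0xCA62C1D6 (3395469782))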
+ movl 48(%esp), %esi
+ movl 56(%esp), %ebp
+ xorl %ebp, %esi
+ movl 16(%esp), %ebp
+ xorl %ebp, %esi
+ movl 36(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 48(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 61
+ movl 52(%esp), %edi
+ movl 60(%esp), %ebp
+ xorl %ebp, %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 40(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 52(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+ # 20_39 62
+ movl 56(%esp), %edx
+ movl (%esp), %ebp
+ xorl %ebp, %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 44(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, 56(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 63
+ movl 60(%esp), %ebx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 60(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 20_39 64
+ movl (%esp), %ecx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %edi, %ebp
+ movl %ecx, (%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ecx,%eax,1),%ecx
+ movl %ebx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %ebp, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ # 20_39 65
+ movl 4(%esp), %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 56(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ebx, %ebp
+.byte 209
+.byte 192 # roll $1 %eax
+ xorl %edx, %ebp
+ movl %eax, 4(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+ # 20_39 66
+ movl 8(%esp), %esi
+ movl 16(%esp), %ebp
+ xorl %ebp, %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl 60(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 8(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 67
+ movl 12(%esp), %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl (%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 12(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+ # 20_39 68
+ movl 16(%esp), %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 4(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, 16(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 69
+ movl 20(%esp), %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 20(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 20_39 70
+ movl 24(%esp), %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %edi, %ebp
+ movl %ecx, 24(%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ecx,%eax,1),%ecx
+ movl %ebx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %ebp, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ # 20_39 71
+ movl 28(%esp), %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 16(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ebx, %ebp
+.byte 209
+.byte 192 # roll $1 %eax
+ xorl %edx, %ebp
+ movl %eax, 28(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+ # 20_39 72
+ movl 32(%esp), %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 20(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 32(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 73
+ movl 36(%esp), %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 36(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %ebp, %edx
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+ # 20_39 74
+ movl 40(%esp), %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 28(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 # roll $1 %edx
+ xorl %eax, %ebp
+ movl %edx, 40(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edx,%ebx,1),%edx
+ movl %edi, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebp, %ebx
+.byte 209
+.byte 206 # rorl $1 %esi
+ addl %ebx, %edx
+ # 20_39 75
+ movl 44(%esp), %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 # roll $1 %ebx
+ xorl %esi, %ebp
+ movl %ebx, 44(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ebx,%ecx,1),%ebx
+ movl %edx, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ebp, %ecx
+.byte 209
+.byte 207 # rorl $1 %edi
+ addl %ecx, %ebx
+ # 20_39 76
+ movl 48(%esp), %ecx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edx, %ebp
+.byte 209
+.byte 193 # roll $1 %ecx
+ xorl %edi, %ebp
+ movl %ecx, 48(%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ecx,%eax,1),%ecx
+ movl %ebx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %ebp, %eax
+.byte 209
+.byte 202 # rorl $1 %edx
+ addl %eax, %ecx
+ # 20_39 77
+ movl 52(%esp), %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 40(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ebx, %ebp
+.byte 209
+.byte 192 # roll $1 %eax
+ xorl %edx, %ebp
+ movl %eax, 52(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $5, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %ebp, %esi
+.byte 209
+.byte 203 # rorl $1 %ebx
+ addl %esi, %eax
+ # 20_39 78
+ movl 56(%esp), %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 44(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 # roll $1 %esi
+ xorl %ebx, %ebp
+ movl %esi, 56(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %ebp, %edi
+.byte 209
+.byte 201 # rorl $1 %ecx
+ addl %edi, %esi
+ # 20_39 79
+ movl 60(%esp), %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 48(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 # roll $1 %edi
+ xorl %ecx, %ebp
+ movl %edi, 60(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+ addl %ebp, %edx
+ movl 128(%esp), %ebp
+.byte 209
+.byte 200 # rorl $1 %eax
+ addl %edx, %edi
+.byte 209
+.byte 200 # rorl $1 %eax
+ # End processing
+
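+	# Fold the five working registers back into the 20-byte chaining
+	# state at (%ebp), advance the block pointer by 64, and loop while it
+	# is still below the end pointer saved at 68(%esp).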
+ movl 12(%ebp), %edx
+ addl %ecx, %edx
+ movl 4(%ebp), %ecx
+ addl %esi, %ecx
+ movl %eax, %esi
+ movl (%ebp), %eax
+ movl %edx, 12(%ebp)
+ addl %edi, %eax
+ movl 16(%ebp), %edi
+ addl %ebx, %edi
+ movl 8(%ebp), %ebx
+ addl %esi, %ebx
+ movl %eax, (%ebp)
+ movl 132(%esp), %esi
+ movl %ebx, 8(%ebp)
+ addl $64, %esi
+ movl 68(%esp), %eax
+ movl %edi, 16(%ebp)
+ cmpl %eax, %esi
+ movl %ecx, 4(%ebp)
+ jl .L000start
+ addl $108, %esp
+ popl %edi
+ popl %ebx
+ popl %ebp
+ popl %esi
+ ret
+.L_sha1_block_asm_data_order_end:
+ .size sha1_block_asm_data_order,.L_sha1_block_asm_data_order_end-sha1_block_asm_data_order
+.ident "desasm.pl"
+.text
+ .align 16
+.globl sha1_block_asm_host_order
+ .type sha1_block_asm_host_order,@function
+sha1_block_asm_host_order:
+ movl 12(%esp), %ecx
+ pushl %esi
+ sall $6, %ecx
+ movl 12(%esp), %esi
+ pushl %ebp
+ addl %esi, %ecx
+ pushl %ebx
+ movl 16(%esp), %ebp
+ pushl %edi
+ movl 12(%ebp), %edx
+ subl $108, %esp
+ movl 16(%ebp), %edi
+ movl 8(%ebp), %ebx
+ movl %ecx, 68(%esp)
+ # First we need to setup the X array
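+	# Host-order entry: the 16 input words are copied to the stack X
+	# array as-is, with no byte swap, before joining the shared round code.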
+ movl (%esi), %eax
+ movl 4(%esi), %ecx
+ movl %eax, (%esp)
+ movl %ecx, 4(%esp)
+ movl 8(%esi), %eax
+ movl 12(%esi), %ecx
+ movl %eax, 8(%esp)
+ movl %ecx, 12(%esp)
+ movl 16(%esi), %eax
+ movl 20(%esi), %ecx
+ movl %eax, 16(%esp)
+ movl %ecx, 20(%esp)
+ movl 24(%esi), %eax
+ movl 28(%esi), %ecx
+ movl %eax, 24(%esp)
+ movl %ecx, 28(%esp)
+ movl 32(%esi), %eax
+ movl 36(%esi), %ecx
+ movl %eax, 32(%esp)
+ movl %ecx, 36(%esp)
+ movl 40(%esi), %eax
+ movl 44(%esi), %ecx
+ movl %eax, 40(%esp)
+ movl %ecx, 44(%esp)
+ movl 48(%esi), %eax
+ movl 52(%esi), %ecx
+ movl %eax, 48(%esp)
+ movl %ecx, 52(%esp)
+ movl 56(%esi), %eax
+ movl 60(%esi), %ecx
+ movl %eax, 56(%esp)
+ movl %ecx, 60(%esp)
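+	# Re-enter the common round code at .L001shortcut, defined earlier
+	# inside sha1_block_asm_data_order.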
+ jmp .L001shortcut
+.L_sha1_block_asm_host_order_end:
+ .size sha1_block_asm_host_order,.L_sha1_block_asm_host_order_end-sha1_block_asm_host_order
+.ident "desasm.pl"