author		ume <ume@FreeBSD.org>	2002-03-05 09:19:02 +0000
committer	ume <ume@FreeBSD.org>	2002-03-05 09:19:02 +0000
commit		92361f6aadf0c42e4292208de34b080d7479303c (patch)
tree		f180dd9b96cf2c86471621a490166f4833fd61e2 /sys/crypto/blowfish
parent		1fd266f1df3466b293c974072664403373c2e7da (diff)
- Speedup 3DES by using assembly code for i386.
- Sync des/blowfish to more recent openssl.

Obtained from:	KAME/NetBSD
MFC after:	2 weeks
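The most visible interface change from the OpenSSL/KAME sync is that BF_encrypt() loses its third encrypt/decrypt argument and decryption moves to a separate BF_decrypt() (see the bf_enc.c, bf_skey.c and blowfish.h hunks below). A minimal caller-side sketch of the post-commit API, assuming a hypothetical 8-byte key and a single block; this is an illustration, not code from the tree:

/*
 * Hedged sketch: round-tripping one block with the split API.
 * Assumes <crypto/blowfish/blowfish.h> for BF_KEY, BF_LONG and the
 * prototypes added in this commit.
 */
static void
bf_roundtrip_example(void)
{
	unsigned char key_bytes[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };	/* hypothetical key */
	BF_LONG block[2] = { 0x01234567, 0x89abcdef };			/* one 8-byte block, host endian */
	BF_KEY key;

	BF_set_key(&key, sizeof(key_bytes), key_bytes);

	/* Before this commit: BF_encrypt(block, &key, BF_ENCRYPT / BF_DECRYPT). */
	BF_encrypt(block, &key);	/* encrypt in place */
	BF_decrypt(block, &key);	/* decrypt back to the original halves */
}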
Diffstat (limited to 'sys/crypto/blowfish')
-rw-r--r--	sys/crypto/blowfish/arch/i386/bf_enc.S		17
-rw-r--r--	sys/crypto/blowfish/arch/i386/bf_enc_586.S	762
-rw-r--r--	sys/crypto/blowfish/arch/i386/bf_enc_686.S	734
-rw-r--r--	sys/crypto/blowfish/bf_enc.c			125
-rw-r--r--	sys/crypto/blowfish/bf_locl.h			4
-rw-r--r--	sys/crypto/blowfish/bf_pi.h			4
-rw-r--r--	sys/crypto/blowfish/bf_skey.c			8
-rw-r--r--	sys/crypto/blowfish/blowfish.h			8
8 files changed, 1596 insertions, 66 deletions
diff --git a/sys/crypto/blowfish/arch/i386/bf_enc.S b/sys/crypto/blowfish/arch/i386/bf_enc.S
new file mode 100644
index 0000000..a261e55
--- /dev/null
+++ b/sys/crypto/blowfish/arch/i386/bf_enc.S
@@ -0,0 +1,17 @@
+/* $NetBSD: bf_enc.S,v 1.1 2001/09/09 11:01:01 tls Exp $ */
+/* $FreeBSD$ */
+
+/*
+ * Written by Jason R. Thorpe <thorpej@zembu.com> and Thor Lancelot Simon
+ * <tls@netbsd.org>. Public domain.
+ */
+
+/*
+ * XXX Should use CPP symbols defined as a result of
+ * XXX `cc -mcpu=pentiumpro'.
+ */
+#if defined(I386_CPU) || defined(I486_CPU) || defined(I586_CPU)
+#include "bf_enc_586.S"
+#else
+#include "bf_enc_686.S"
+#endif
diff --git a/sys/crypto/blowfish/arch/i386/bf_enc_586.S b/sys/crypto/blowfish/arch/i386/bf_enc_586.S
new file mode 100644
index 0000000..f6b3cef
--- /dev/null
+++ b/sys/crypto/blowfish/arch/i386/bf_enc_586.S
@@ -0,0 +1,762 @@
+/* $NetBSD: bf_enc_586.S,v 1.1 2001/09/09 11:01:01 tls Exp $ */
+/* $FreeBSD$ */
+
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscapes SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are aheared to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the rouines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publically available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+/*
+ * Modified from the output of `perl bf-586.pl elf' by
+ * Jason R. Thorpe <thorpej@zembu.com> and Thor Lancelot Simon
+ * <tls@netbsd.org>
+ */
+
+#include <i386/include/asm.h>
+#define _C_LABEL CNAME
+
+ENTRY(BF_encrypt)
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ /* Load the 2 words */
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ xorl %eax, %eax
+ movl (%ebp), %ebx
+ xorl %ecx, %ecx
+ xorl %ebx, %edi
+
+ /* Round 0 */
+ movl 4(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 1 */
+ movl 8(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 2 */
+ movl 12(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 3 */
+ movl 16(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 4 */
+ movl 20(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 5 */
+ movl 24(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 6 */
+ movl 28(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 7 */
+ movl 32(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 8 */
+ movl 36(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 9 */
+ movl 40(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 10 */
+ movl 44(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 11 */
+ movl 48(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 12 */
+ movl 52(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 13 */
+ movl 56(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 14 */
+ movl 60(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 15 */
+ movl 64(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ # Load parameter 0 (16) enc=1
+ movl 20(%esp), %eax
+ xorl %ebx, %edi
+ movl 68(%ebp), %edx
+ xorl %edx, %esi
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.L_BF_encrypt_end:
+ .size _C_LABEL(BF_encrypt),.L_BF_encrypt_end-_C_LABEL(BF_encrypt)
+
+ENTRY(BF_decrypt)
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ xorl %eax, %eax
+ movl 68(%ebp), %ebx
+ xorl %ecx, %ecx
+ xorl %ebx, %edi
+
+ /* Round 16 */
+ movl 64(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 15 */
+ movl 60(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 14 */
+ movl 56(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 13 */
+ movl 52(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 12 */
+ movl 48(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 11 */
+ movl 44(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 10 */
+ movl 40(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 9 */
+ movl 36(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 8 */
+ movl 32(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 7 */
+ movl 28(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 6 */
+ movl 24(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 5 */
+ movl 20(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 4 */
+ movl 16(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 3 */
+ movl 12(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %edi
+
+ /* Round 2 */
+ movl 8(%ebp), %edx
+ movl %edi, %ebx
+ xorl %edx, %esi
+ shrl $16, %ebx
+ movl %edi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ xorl %eax, %eax
+ xorl %ebx, %esi
+
+ /* Round 1 */
+ movl 4(%ebp), %edx
+ movl %esi, %ebx
+ xorl %edx, %edi
+ shrl $16, %ebx
+ movl %esi, %edx
+ movb %bh, %al
+ andl $255, %ebx
+ movb %dh, %cl
+ andl $255, %edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax, %ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax, %ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx, %ebx
+ # Load parameter 0 (1) enc=0
+ movl 20(%esp), %eax
+ xorl %ebx, %edi
+ movl (%ebp), %edx
+ xorl %edx, %esi
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.L_BF_decrypt_end:
+ .size _C_LABEL(BF_decrypt),.L_BF_decrypt_end-_C_LABEL(BF_decrypt)
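Each unrolled round above is the standard Blowfish Feistel step. The displacements 72, 1096, 2120 and 3144 in the indexed loads are the byte offsets of the four 256-entry S-boxes inside BF_KEY: the 18-word P array occupies the first 72 bytes, followed by four 1024-byte S-boxes. As a rough C rendering of what one round computes (a sketch for orientation only, assuming BF_LONG from blowfish.h; it is not the kernel's BF_ENC macro verbatim):

/* F(x) as the assembly computes it: add, xor, add over the four S-boxes. */
static BF_LONG
bf_f(BF_LONG x, const BF_LONG *S)	/* S points at key->S[0] */
{
	return ((S[         (x >> 24) & 0xff ] +
		 S[0x100 + ((x >> 16) & 0xff)]) ^
		 S[0x200 + ((x >>  8) & 0xff)]) +
		 S[0x300 + ( x        & 0xff)];
}

/*
 * Round i of BF_encrypt, with l and r the two 32-bit halves of the block:
 *	r ^= key->P[i];
 *	r ^= bf_f(l, key->S);
 * The roles of l and r alternate from round to round, and BF_decrypt walks
 * the P array in the opposite direction, as in the hand-scheduled code above.
 */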
diff --git a/sys/crypto/blowfish/arch/i386/bf_enc_686.S b/sys/crypto/blowfish/arch/i386/bf_enc_686.S
new file mode 100644
index 0000000..e480e65
--- /dev/null
+++ b/sys/crypto/blowfish/arch/i386/bf_enc_686.S
@@ -0,0 +1,734 @@
+/* $NetBSD: bf_enc_686.S,v 1.1 2001/09/09 11:01:02 tls Exp $ */
+/* $FreeBSD$ */
+
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscapes SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are aheared to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the rouines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publically available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+/*
+ * Modified from the output of `perl bf-686.pl elf' by
+ * Jason R. Thorpe <thorpej@zembu.com> and Thor Lancelot Simon
+ * <tls@netbsd.org>
+ */
+
+#include <i386/include/asm.h>
+#define _C_LABEL CNAME
+
+ENTRY(BF_encrypt)
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ /* Load the 2 words */
+ movl 20(%esp), %eax
+ movl (%eax), %ecx
+ movl 4(%eax), %edx
+
+ /* P pointer, s and enc flag */
+ movl 24(%esp), %edi
+ xorl %eax, %eax
+ xorl %ebx, %ebx
+ xorl (%edi), %ecx
+
+ /* Round 0 */
+ rorl $16, %ecx
+ movl 4(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 1 */
+ rorl $16, %edx
+ movl 8(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 2 */
+ rorl $16, %ecx
+ movl 12(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 3 */
+ rorl $16, %edx
+ movl 16(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 4 */
+ rorl $16, %ecx
+ movl 20(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 5 */
+ rorl $16, %edx
+ movl 24(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 6 */
+ rorl $16, %ecx
+ movl 28(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 7 */
+ rorl $16, %edx
+ movl 32(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 8 */
+ rorl $16, %ecx
+ movl 36(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 9 */
+ rorl $16, %edx
+ movl 40(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 10 */
+ rorl $16, %ecx
+ movl 44(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 11 */
+ rorl $16, %edx
+ movl 48(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 12 */
+ rorl $16, %ecx
+ movl 52(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 13 */
+ rorl $16, %edx
+ movl 56(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 14 */
+ rorl $16, %ecx
+ movl 60(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 15 */
+ rorl $16, %edx
+ movl 64(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+ xorl 68(%edi), %edx
+ movl 20(%esp), %eax
+ movl %edx, (%eax)
+ movl %ecx, 4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.L_BF_encrypt_end:
+ .size _C_LABEL(BF_encrypt),.L_BF_encrypt_end-_C_LABEL(BF_encrypt)
+
+ENTRY(BF_decrypt)
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ /* Load the 2 words */
+ movl 20(%esp), %eax
+ movl (%eax), %ecx
+ movl 4(%eax), %edx
+
+ /* P pointer, s and enc flag */
+ movl 24(%esp), %edi
+ xorl %eax, %eax
+ xorl %ebx, %ebx
+ xorl 68(%edi), %ecx
+
+ /* Round 16 */
+ rorl $16, %ecx
+ movl 64(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 15 */
+ rorl $16, %edx
+ movl 60(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 14 */
+ rorl $16, %ecx
+ movl 56(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 13 */
+ rorl $16, %edx
+ movl 52(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 12 */
+ rorl $16, %ecx
+ movl 48(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 11 */
+ rorl $16, %edx
+ movl 44(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 10 */
+ rorl $16, %ecx
+ movl 40(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 9 */
+ rorl $16, %edx
+ movl 36(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 8 */
+ rorl $16, %ecx
+ movl 32(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 7 */
+ rorl $16, %edx
+ movl 28(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 6 */
+ rorl $16, %ecx
+ movl 24(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 5 */
+ rorl $16, %edx
+ movl 20(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 4 */
+ rorl $16, %ecx
+ movl 16(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 3 */
+ rorl $16, %edx
+ movl 12(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 2 */
+ rorl $16, %ecx
+ movl 8(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 1 */
+ rorl $16, %edx
+ movl 4(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+ xorl (%edi), %edx
+ movl 20(%esp), %eax
+ movl %edx, (%eax)
+ movl %ecx, 4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ .L_BF_decrypt_end:
+ .size _C_LABEL(BF_decrypt),.L_BF_decrypt_end-_C_LABEL(BF_decrypt)
diff --git a/sys/crypto/blowfish/bf_enc.c b/sys/crypto/blowfish/bf_enc.c
index 5edd6db..831db17 100644
--- a/sys/crypto/blowfish/bf_enc.c
+++ b/sys/crypto/blowfish/bf_enc.c
@@ -1,12 +1,12 @@
/* $FreeBSD$ */
-/* $KAME: bf_enc.c,v 1.5 2000/09/18 21:21:19 itojun Exp $ */
+/* $KAME: bf_enc.c,v 1.7 2002/02/27 01:33:59 itojun Exp $ */
/* crypto/bf/bf_enc.c */
-/* Copyright (C) 1995-1997 Eric Young (eay@mincom.oz.au)
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
* All rights reserved.
*
* This package is an SSL implementation written
- * by Eric Young (eay@mincom.oz.au).
+ * by Eric Young (eay@cryptsoft.com).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
@@ -14,7 +14,7 @@
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
- * except that the holder is Tim Hudson (tjh@mincom.oz.au).
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
@@ -34,12 +34,12 @@
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
- * Eric Young (eay@mincom.oz.au)"
+ * Eric Young (eay@cryptsoft.com)"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
- * "This product includes software written by Tim Hudson (tjh@mincom.oz.au)"
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
@@ -75,10 +75,9 @@ to modify the code.
/* XXX "data" is host endian */
void
-BF_encrypt(data, key, encrypt)
+BF_encrypt(data, key)
BF_LONG *data;
BF_KEY *key;
- int encrypt;
{
register BF_LONG l, r, *p, *s;
@@ -87,57 +86,73 @@ BF_encrypt(data, key, encrypt)
l = data[0];
r = data[1];
- if (encrypt) {
- l^=p[0];
- BF_ENC(r, l, s, p[ 1]);
- BF_ENC(l, r, s, p[ 2]);
- BF_ENC(r, l, s, p[ 3]);
- BF_ENC(l, r, s, p[ 4]);
- BF_ENC(r, l, s, p[ 5]);
- BF_ENC(l, r, s, p[ 6]);
- BF_ENC(r, l, s, p[ 7]);
- BF_ENC(l, r, s, p[ 8]);
- BF_ENC(r, l, s, p[ 9]);
- BF_ENC(l, r, s, p[10]);
- BF_ENC(r, l, s, p[11]);
- BF_ENC(l, r, s, p[12]);
- BF_ENC(r, l, s, p[13]);
- BF_ENC(l, r, s, p[14]);
- BF_ENC(r, l, s, p[15]);
- BF_ENC(l, r, s, p[16]);
+ l^=p[0];
+ BF_ENC(r, l, s, p[ 1]);
+ BF_ENC(l, r, s, p[ 2]);
+ BF_ENC(r, l, s, p[ 3]);
+ BF_ENC(l, r, s, p[ 4]);
+ BF_ENC(r, l, s, p[ 5]);
+ BF_ENC(l, r, s, p[ 6]);
+ BF_ENC(r, l, s, p[ 7]);
+ BF_ENC(l, r, s, p[ 8]);
+ BF_ENC(r, l, s, p[ 9]);
+ BF_ENC(l, r, s, p[10]);
+ BF_ENC(r, l, s, p[11]);
+ BF_ENC(l, r, s, p[12]);
+ BF_ENC(r, l, s, p[13]);
+ BF_ENC(l, r, s, p[14]);
+ BF_ENC(r, l, s, p[15]);
+ BF_ENC(l, r, s, p[16]);
#if BF_ROUNDS == 20
- BF_ENC(r, l, s, p[17]);
- BF_ENC(l, r, s, p[18]);
- BF_ENC(r, l, s, p[19]);
- BF_ENC(l, r, s, p[20]);
+ BF_ENC(r, l, s, p[17]);
+ BF_ENC(l, r, s, p[18]);
+ BF_ENC(r, l, s, p[19]);
+ BF_ENC(l, r, s, p[20]);
#endif
- r ^= p[BF_ROUNDS + 1];
- } else {
- l ^= p[BF_ROUNDS + 1];
+ r ^= p[BF_ROUNDS + 1];
+
+ data[1] = l & 0xffffffff;
+ data[0] = r & 0xffffffff;
+}
+
+/* XXX "data" is host endian */
+void
+BF_decrypt(data, key)
+ BF_LONG *data;
+ BF_KEY *key;
+{
+ register BF_LONG l, r, *p, *s;
+
+ p = key->P;
+ s= &key->S[0];
+ l = data[0];
+ r = data[1];
+
+ l ^= p[BF_ROUNDS + 1];
#if BF_ROUNDS == 20
- BF_ENC(r, l, s, p[20]);
- BF_ENC(l, r, s, p[19]);
- BF_ENC(r, l, s, p[18]);
- BF_ENC(l, r, s, p[17]);
+ BF_ENC(r, l, s, p[20]);
+ BF_ENC(l, r, s, p[19]);
+ BF_ENC(r, l, s, p[18]);
+ BF_ENC(l, r, s, p[17]);
#endif
- BF_ENC(r, l, s, p[16]);
- BF_ENC(l, r, s, p[15]);
- BF_ENC(r, l, s, p[14]);
- BF_ENC(l, r, s, p[13]);
- BF_ENC(r, l, s, p[12]);
- BF_ENC(l, r, s, p[11]);
- BF_ENC(r, l, s, p[10]);
- BF_ENC(l, r, s, p[ 9]);
- BF_ENC(r, l, s, p[ 8]);
- BF_ENC(l, r, s, p[ 7]);
- BF_ENC(r, l, s, p[ 6]);
- BF_ENC(l, r, s, p[ 5]);
- BF_ENC(r, l, s, p[ 4]);
- BF_ENC(l, r, s, p[ 3]);
- BF_ENC(r, l, s, p[ 2]);
- BF_ENC(l, r, s, p[ 1]);
- r ^= p[0];
- }
+ BF_ENC(r, l, s, p[16]);
+ BF_ENC(l, r, s, p[15]);
+ BF_ENC(r, l, s, p[14]);
+ BF_ENC(l, r, s, p[13]);
+ BF_ENC(r, l, s, p[12]);
+ BF_ENC(l, r, s, p[11]);
+ BF_ENC(r, l, s, p[10]);
+ BF_ENC(l, r, s, p[ 9]);
+ BF_ENC(r, l, s, p[ 8]);
+ BF_ENC(l, r, s, p[ 7]);
+ BF_ENC(r, l, s, p[ 6]);
+ BF_ENC(l, r, s, p[ 5]);
+ BF_ENC(r, l, s, p[ 4]);
+ BF_ENC(l, r, s, p[ 3]);
+ BF_ENC(r, l, s, p[ 2]);
+ BF_ENC(l, r, s, p[ 1]);
+ r ^= p[0];
+
data[1] = l & 0xffffffff;
data[0] = r & 0xffffffff;
}
diff --git a/sys/crypto/blowfish/bf_locl.h b/sys/crypto/blowfish/bf_locl.h
index 52585bb..9314ff3 100644
--- a/sys/crypto/blowfish/bf_locl.h
+++ b/sys/crypto/blowfish/bf_locl.h
@@ -1,5 +1,5 @@
/* $FreeBSD$ */
-/* $KAME: bf_locl.h,v 1.5 2000/08/31 06:03:48 itojun Exp $ */
+/* $KAME: bf_locl.h,v 1.6 2001/09/10 04:03:56 itojun Exp $ */
/* crypto/bf/bf_local.h */
/* Copyright (C) 1995-1997 Eric Young (eay@mincom.oz.au)
@@ -163,7 +163,6 @@
*/
#undef BF_PTR
#undef BF_PTR2
-#ifdef __NetBSD__
#ifdef __i386__
#define BF_PTR2
#else
@@ -171,7 +170,6 @@
#define BF_PTR
#endif
#endif
-#endif /*NetBSD*/
#define BF_M 0x3fc
#define BF_0 22L
diff --git a/sys/crypto/blowfish/bf_pi.h b/sys/crypto/blowfish/bf_pi.h
index 8982aac..fdd5a27 100644
--- a/sys/crypto/blowfish/bf_pi.h
+++ b/sys/crypto/blowfish/bf_pi.h
@@ -1,5 +1,5 @@
/* $FreeBSD$ */
-/* $KAME: bf_pi.h,v 1.3 2000/03/27 04:36:26 sumikawa Exp $ */
+/* $KAME: bf_pi.h,v 1.4 2001/09/10 04:03:56 itojun Exp $ */
/* crypto/bf/bf_pi.h */
/* Copyright (C) 1995-1997 Eric Young (eay@mincom.oz.au)
@@ -59,7 +59,7 @@
* [including the GNU Public Licence.]
*/
-static BF_KEY bf_init= {
+static const BF_KEY bf_init= {
{
0x243f6a88L, 0x85a308d3L, 0x13198a2eL, 0x03707344L,
0xa4093822L, 0x299f31d0L, 0x082efa98L, 0xec4e6c89L,
diff --git a/sys/crypto/blowfish/bf_skey.c b/sys/crypto/blowfish/bf_skey.c
index 4bbe036..a93558c 100644
--- a/sys/crypto/blowfish/bf_skey.c
+++ b/sys/crypto/blowfish/bf_skey.c
@@ -1,5 +1,5 @@
/* $FreeBSD$ */
-/* $KAME: bf_skey.c,v 1.5 2000/11/06 13:58:08 itojun Exp $ */
+/* $KAME: bf_skey.c,v 1.7 2002/02/27 01:33:59 itojun Exp $ */
/* crypto/bf/bf_skey.c */
/* Copyright (C) 1995-1997 Eric Young (eay@mincom.oz.au)
@@ -76,7 +76,7 @@ BF_set_key(key, len, data)
BF_LONG *p, ri, in[2];
unsigned char *d, *end;
- memcpy((char *)key, (char *)&bf_init, sizeof(BF_KEY));
+ memcpy((char *)key, (const char *)&bf_init, sizeof(BF_KEY));
p = key->P;
if (len > ((BF_ROUNDS + 2) * 4))
@@ -106,14 +106,14 @@ BF_set_key(key, len, data)
in[0] = 0L;
in[1] = 0L;
for (i = 0; i < BF_ROUNDS + 2; i += 2) {
- BF_encrypt(in, key, BF_ENCRYPT);
+ BF_encrypt(in, key);
p[i ] = in[0];
p[i+1] = in[1];
}
p = key->S;
for (i = 0; i < 4 * 256; i += 2) {
- BF_encrypt(in, key, BF_ENCRYPT);
+ BF_encrypt(in, key);
p[i ] = in[0];
p[i+1] = in[1];
}
diff --git a/sys/crypto/blowfish/blowfish.h b/sys/crypto/blowfish/blowfish.h
index 76605f8..26b3e7c 100644
--- a/sys/crypto/blowfish/blowfish.h
+++ b/sys/crypto/blowfish/blowfish.h
@@ -1,5 +1,5 @@
/* $FreeBSD$ */
-/* $KAME: blowfish.h,v 1.10 2000/09/18 21:21:20 itojun Exp $ */
+/* $KAME: blowfish.h,v 1.12 2002/02/27 01:33:59 itojun Exp $ */
/* crypto/bf/blowfish.h */
/* Copyright (C) 1995-1997 Eric Young (eay@mincom.oz.au)
@@ -81,7 +81,11 @@ typedef struct bf_key_st {
} BF_KEY;
void BF_set_key __P((BF_KEY *, int, unsigned char *));
-void BF_encrypt __P((BF_LONG *, BF_KEY *, int));
+void BF_encrypt __P((BF_LONG *, BF_KEY *));
+void BF_decrypt __P((BF_LONG *, BF_KEY *));
+void BF_cbc_encrypt(const unsigned char *, unsigned char *, long,
+ const BF_KEY *, unsigned char *, int);
+
#ifdef __cplusplus
}
#endif
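The header also gains a prototype for BF_cbc_encrypt(), whose implementation lies outside the hunks shown here. For orientation, a heavily simplified encrypt-only CBC sketch built on the new BF_encrypt() entry point; it glosses over the host-endian conversion noted in bf_enc.c, handles whole 8-byte blocks only, and is not the library routine:

/* Simplified CBC encryption over BF_encrypt(); illustrative only. */
#include <string.h>

static void
bf_cbc_encrypt_sketch(const unsigned char *in, unsigned char *out, long len,
    BF_KEY *key, unsigned char *ivec)
{
	BF_LONG iv[2], blk[2];
	long i;

	memcpy(iv, ivec, 8);
	for (i = 0; i + 8 <= len; i += 8) {
		memcpy(blk, in + i, 8);
		blk[0] ^= iv[0];		/* chain in previous ciphertext (or IV) */
		blk[1] ^= iv[1];
		BF_encrypt(blk, key);		/* encrypt one 8-byte block in place */
		memcpy(out + i, blk, 8);
		iv[0] = blk[0];
		iv[1] = blk[1];
	}
	memcpy(ivec, iv, 8);			/* hand the running IV back to the caller */
}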