author    Ralf S. Engelschall <rse@openssl.org>  1998-12-21 11:52:47 +0100
committer Ralf S. Engelschall <rse@openssl.org>  1998-12-21 11:52:47 +0100
commit    d02b48c63a58ea4367a0e905979f140b7d090f86 (patch)
tree      504f62ed3d84799f785b9cd9fab255a21b0e1b0e /crypto/bf/asm
Import of old SSLeay release: SSLeay 0.8.1b
Diffstat (limited to 'crypto/bf/asm')
 -rw-r--r--  crypto/bf/asm/b-win32.asm    662
 -rw-r--r--  crypto/bf/asm/bf586.pl       159
 -rw-r--r--  crypto/bf/asm/bx86-cpp.s     666
 -rw-r--r--  crypto/bf/asm/bx86unix.cpp    33
 -rw-r--r--  crypto/bf/asm/readme           3
 -rw-r--r--  crypto/bf/asm/win32.asm      663
6 files changed, 2186 insertions, 0 deletions
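
All four assembly files in this import implement the same routine: a fully
unrolled 16-round Blowfish BF_encrypt emitted by bf586.pl. The byte offsets
that recur throughout the listings (72, 1096, 2120 and 3144) are the bases of
the four S-boxes: the 18-entry subkey array P occupies bytes 0 to 71 of the
key schedule, and each 256-entry S-box takes 1024 bytes after it (see $BF_OFF
in bf586.pl below). As a reading aid, here is a minimal C sketch of the data
flow the unrolled code computes; it assumes the usual Blowfish key layout
(P[18] immediately followed by S[4*256]) and a 32-bit BF_LONG, and is not the
SSLeay C implementation itself:

    typedef unsigned int BF_LONG;       /* assumption: 32 bits wide */

    /* F(x) exactly as each unrolled round computes it:
     * ((S0[a] + S1[b]) ^ S2[c]) + S3[d], with a..d the bytes of x
     * from most to least significant. */
    static BF_LONG BF_F(const BF_LONG *S, BF_LONG x)
        {
        return ((S[0x000 + ((x >> 24) & 0xff)]
               + S[0x100 + ((x >> 16) & 0xff)])
               ^ S[0x200 + ((x >>  8) & 0xff)])
               + S[0x300 + ( x        & 0xff)];
        }

    void BF_encrypt_sketch(BF_LONG *data, const BF_LONG *P, int encrypt)
        {
        const BF_LONG *S = P + 18;      /* byte offset 72 = 18*4 past "edi" */
        BF_LONG L = data[0], R = data[1];
        int i;

        if (encrypt)
            {
            L ^= P[0];
            for (i = 1; i < 17; i += 2)
                {
                R ^= BF_F(S, L) ^ P[i];     /* even rounds in the listings */
                L ^= BF_F(S, R) ^ P[i + 1]; /* odd rounds                  */
                }
            R ^= P[17];
            }
        else                            /* the $L000start_decrypt path */
            {
            L ^= P[17];
            for (i = 16; i > 0; i -= 2)
                {
                R ^= BF_F(S, L) ^ P[i];
                L ^= BF_F(S, R) ^ P[i - 1];
                }
            R ^= P[0];
            }
        data[0] = R;                    /* the two halves swap on output */
        data[1] = L;
        }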
diff --git a/crypto/bf/asm/b-win32.asm b/crypto/bf/asm/b-win32.asm
new file mode 100644
index 0000000000..bef272eebb
--- /dev/null
+++ b/crypto/bf/asm/b-win32.asm
@@ -0,0 +1,662 @@
+ ; Don't even think of reading this code
+ ; It was automatically generated by bf586.pl
+ ; Which is a perl program used to generate the x86 assembler for
+ ; any of elf, a.out, Win32, or Solaris
+ ; It can be found in SSLeay 0.7.0+
+ ; eric <eay@cryptsoft.com>
+ ;
+ TITLE bfx86xxxx.asm
+ .386
+.model FLAT
+_TEXT SEGMENT
+PUBLIC _BF_encrypt
+_BF_encrypt PROC NEAR
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+ ; Load the 2 words
+ mov eax, DWORD PTR 20[esp]
+ mov ecx, DWORD PTR [eax]
+ mov edx, DWORD PTR 4[eax]
+ ;
+ ; P pointer, s and enc flag
+ mov edi, DWORD PTR 24[esp]
+ xor eax, eax
+ xor ebx, ebx
+ mov ebp, DWORD PTR 28[esp]
+ cmp ebp, 0
+ je $L000start_decrypt
+ xor ecx, DWORD PTR [edi]
+ ;
+ ; Round 0
+ ror ecx, 16
+ mov esi, DWORD PTR 4[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 1
+ ror edx, 16
+ mov esi, DWORD PTR 8[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 2
+ ror ecx, 16
+ mov esi, DWORD PTR 12[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 3
+ ror edx, 16
+ mov esi, DWORD PTR 16[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 4
+ ror ecx, 16
+ mov esi, DWORD PTR 20[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 5
+ ror edx, 16
+ mov esi, DWORD PTR 24[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 6
+ ror ecx, 16
+ mov esi, DWORD PTR 28[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 7
+ ror edx, 16
+ mov esi, DWORD PTR 32[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 8
+ ror ecx, 16
+ mov esi, DWORD PTR 36[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 9
+ ror edx, 16
+ mov esi, DWORD PTR 40[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 10
+ ror ecx, 16
+ mov esi, DWORD PTR 44[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 11
+ ror edx, 16
+ mov esi, DWORD PTR 48[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 12
+ ror ecx, 16
+ mov esi, DWORD PTR 52[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 13
+ ror edx, 16
+ mov esi, DWORD PTR 56[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 14
+ ror ecx, 16
+ mov esi, DWORD PTR 60[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 15
+ ror edx, 16
+ mov esi, DWORD PTR 64[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ xor edx, DWORD PTR 68[edi]
+ mov eax, DWORD PTR 20[esp]
+ mov DWORD PTR [eax],edx
+ mov DWORD PTR 4[eax],ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+$L000start_decrypt:
+ xor ecx, DWORD PTR 68[edi]
+ ;
+ ; Round 16
+ ror ecx, 16
+ mov esi, DWORD PTR 64[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 15
+ ror edx, 16
+ mov esi, DWORD PTR 60[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 14
+ ror ecx, 16
+ mov esi, DWORD PTR 56[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 13
+ ror edx, 16
+ mov esi, DWORD PTR 52[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 12
+ ror ecx, 16
+ mov esi, DWORD PTR 48[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 11
+ ror edx, 16
+ mov esi, DWORD PTR 44[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 10
+ ror ecx, 16
+ mov esi, DWORD PTR 40[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 9
+ ror edx, 16
+ mov esi, DWORD PTR 36[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 8
+ ror ecx, 16
+ mov esi, DWORD PTR 32[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 7
+ ror edx, 16
+ mov esi, DWORD PTR 28[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 6
+ ror ecx, 16
+ mov esi, DWORD PTR 24[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 5
+ ror edx, 16
+ mov esi, DWORD PTR 20[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 4
+ ror ecx, 16
+ mov esi, DWORD PTR 16[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 3
+ ror edx, 16
+ mov esi, DWORD PTR 12[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 2
+ ror ecx, 16
+ mov esi, DWORD PTR 8[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 1
+ ror edx, 16
+ mov esi, DWORD PTR 4[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ xor edx, DWORD PTR [edi]
+ mov eax, DWORD PTR 20[esp]
+ mov DWORD PTR [eax],edx
+ mov DWORD PTR 4[eax],ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_BF_encrypt ENDP
+_TEXT ENDS
+END
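
Two details of the listing above are worth spelling out. The arguments sit at
20[esp], 24[esp] and 28[esp] because the return address (4 bytes) plus the
four saved registers (16 bytes) precede them once ebp, ebx, esi and edi are
pushed. And every DWORD PTR offset into edi indexes one flat key schedule. A
hypothetical C declaration consistent with both follows; the type and field
names are illustrative, not taken from the SSLeay headers:

    typedef unsigned int BF_LONG;       /* assumption: 32 bits wide */

    struct bf_key_st                    /* assumed layout behind "edi" */
        {
        BF_LONG P[16 + 2];              /* DWORD PTR 0[edi] .. 68[edi]      */
        BF_LONG S[4 * 256];             /* bases 72, 1096, 2120, 3144 above */
        };

    /* args at 20[esp], 24[esp] and 28[esp] after the four pushes */
    void BF_encrypt(BF_LONG *data, struct bf_key_st *key, int encrypt);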
diff --git a/crypto/bf/asm/bf586.pl b/crypto/bf/asm/bf586.pl
new file mode 100644
index 0000000000..bcb53cf3f5
--- /dev/null
+++ b/crypto/bf/asm/bf586.pl
@@ -0,0 +1,159 @@
+#!/usr/local/bin/perl
+
+$prog="bf586.pl";
+
+# base code is in microsoft
+# op dest, source
+# format.
+#
+
+if ( ($ARGV[0] eq "elf"))
+ { require "x86unix.pl"; }
+elsif ( ($ARGV[0] eq "a.out"))
+ { $aout=1; require "x86unix.pl"; }
+elsif ( ($ARGV[0] eq "sol"))
+ { $sol=1; require "x86unix.pl"; }
+elsif ( ($ARGV[0] eq "cpp"))
+ { $cpp=1; require "x86unix.pl"; }
+elsif ( ($ARGV[0] eq "win32"))
+ { require "x86ms.pl"; }
+else
+ {
+ print STDERR <<"EOF";
+Pick one target type from
+ elf - linux, FreeBSD etc
+ a.out - old linux
+ sol - x86 solaris
+ cpp - format so x86unix.cpp can be used
+ win32 - Windows 95/Windows NT
+EOF
+ exit(1);
+ }
+
+&comment("Don't even think of reading this code");
+&comment("It was automatically generated by $prog");
+&comment("Which is a perl program used to generate the x86 assember for");
+&comment("any of elf, a.out, Win32, or Solaris");
+&comment("It can be found in SSLeay 0.7.0+");
+&comment("eric <eay\@cryptsoft.com>");
+&comment("");
+
+&file("bfx86xxxx");
+
+$BF_ROUNDS=16;
+$BF_OFF=($BF_ROUNDS+2)*4;
+$L="ecx";
+$R="edx";
+$P="edi";
+$tot="esi";
+$tmp1="eax";
+$tmp2="ebx";
+$tmp3="ebp";
+
+&des_encrypt("BF_encrypt");
+
+&file_end();
+
+sub des_encrypt
+ {
+ local($name)=@_;
+
+ &function_begin($name,3,"");
+
+ &comment("");
+ &comment("Load the 2 words");
+ &mov("eax",&wparam(0));
+ &mov($L,&DWP(0,"eax","",0));
+ &mov($R,&DWP(4,"eax","",0));
+
+ &comment("");
+ &comment("P pointer, s and enc flag");
+ &mov($P,&wparam(1));
+
+ &xor( $tmp1, $tmp1);
+ &xor( $tmp2, $tmp2);
+
+ # encrypting part
+
+ &mov("ebp",&wparam(2)); # get encrypt flag
+ &cmp("ebp","0");
+ &je(&label("start_decrypt"));
+
+ &xor($L,&DWP(0,$P,"",0));
+ for ($i=0; $i<$BF_ROUNDS; $i+=2)
+ {
+ &comment("");
+ &comment("Round $i");
+ &BF_ENCRYPT($i+1,$R,$L,$P,$tot,$tmp1,$tmp2,$tmp3);
+
+ &comment("");
+ &comment("Round ".sprintf("%d",$i+1));
+ &BF_ENCRYPT($i+2,$L,$R,$P,$tot,$tmp1,$tmp2,$tmp3);
+ }
+ &xor($R,&DWP(($BF_ROUNDS+1)*4,$P,"",0));
+
+ &mov("eax",&wparam(0));
+ &mov(&DWP(0,"eax","",0),$R);
+ &mov(&DWP(4,"eax","",0),$L);
+ &function_end_A($name);
+
+ &set_label("start_decrypt");
+
+ &xor($L,&DWP(($BF_ROUNDS+1)*4,$P,"",0));
+ for ($i=$BF_ROUNDS; $i>0; $i-=2)
+ {
+ &comment("");
+ &comment("Round $i");
+ &BF_ENCRYPT($i,$R,$L,$P,$tot,$tmp1,$tmp2,$tmp3);
+ &comment("");
+ &comment("Round ".sprintf("%d",$i-1));
+ &BF_ENCRYPT($i-1,$L,$R,$P,$tot,$tmp1,$tmp2,$tmp3);
+ }
+ &xor($R,&DWP(0,$P,"",0));
+
+ &mov("eax",&wparam(0));
+ &mov(&DWP(0,"eax","",0),$R);
+ &mov(&DWP(4,"eax","",0),$L);
+ &function_end_A($name);
+
+ &function_end_B($name);
+ }
+
+sub BF_ENCRYPT
+ {
+ local($i,$L,$R,$P,$tot,$tmp1,$tmp2,$tmp3)=@_;
+
+ &rotr( $R, 16);
+ &mov( $tot, &DWP(&n2a($i*4),$P,"",0));
+
+ &movb( &LB($tmp1), &HB($R));
+ &movb( &LB($tmp2), &LB($R));
+
+ &rotr( $R, 16);
+ &xor( $L, $tot);
+
+ &mov( $tot, &DWP(&n2a($BF_OFF+0x0000),$P,$tmp1,4));
+ &mov( $tmp3, &DWP(&n2a($BF_OFF+0x0400),$P,$tmp2,4));
+
+ &movb( &LB($tmp1), &HB($R));
+ &movb( &LB($tmp2), &LB($R));
+
+ &add( $tot, $tmp3);
+ &mov( $tmp1, &DWP(&n2a($BF_OFF+0x0800),$P,$tmp1,4)); # delay
+
+ &xor( $tot, $tmp1);
+ &mov( $tmp3, &DWP(&n2a($BF_OFF+0x0C00),$P,$tmp2,4));
+
+ &add( $tot, $tmp3);
+ &xor( $tmp1, $tmp1);
+
+ &xor( $L, $tot);
+ # delay
+ }
+
+sub n2a
+ {
+ sprintf("%d",$_[0]);
+ }
+
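
The generator writes the chosen back-end's output to standard output, so the
checked-in files were presumably produced along these lines (the cpp and
win32 output names match files in this directory; the elf name is only an
assumed convention):

    perl bf586.pl cpp   > bx86-cpp.s    # preprocessor-ready Unix source
    perl bf586.pl win32 > win32.asm     # MASM source for Windows 95/NT
    perl bf586.pl elf   > bx86-elf.s    # assumed name for direct ELF output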
diff --git a/crypto/bf/asm/bx86-cpp.s b/crypto/bf/asm/bx86-cpp.s
new file mode 100644
index 0000000000..0925137a6d
--- /dev/null
+++ b/crypto/bf/asm/bx86-cpp.s
@@ -0,0 +1,666 @@
+ /* Don't even think of reading this code */
+ /* It was automatically generated by bf586.pl */
+ /* Which is a perl program used to generate the x86 assembler for */
+ /* any of elf, a.out, Win32, or Solaris */
+ /* It can be found in SSLeay 0.7.0+ */
+ /* eric <eay@cryptsoft.com> */
+
+ .file "bfx86xxxx.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align ALIGN
+.globl BF_encrypt
+ TYPE(BF_encrypt,@function)
+BF_encrypt:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ /* Load the 2 words */
+ movl 20(%esp), %eax
+ movl (%eax), %ecx
+ movl 4(%eax), %edx
+
+ /* P pointer, s and enc flag */
+ movl 24(%esp), %edi
+ xorl %eax, %eax
+ xorl %ebx, %ebx
+ movl 28(%esp), %ebp
+ cmpl $0, %ebp
+ je .L000start_decrypt
+ xorl (%edi), %ecx
+
+ /* Round 0 */
+ rorl $16, %ecx
+ movl 4(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 1 */
+ rorl $16, %edx
+ movl 8(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 2 */
+ rorl $16, %ecx
+ movl 12(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 3 */
+ rorl $16, %edx
+ movl 16(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 4 */
+ rorl $16, %ecx
+ movl 20(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 5 */
+ rorl $16, %edx
+ movl 24(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 6 */
+ rorl $16, %ecx
+ movl 28(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 7 */
+ rorl $16, %edx
+ movl 32(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 8 */
+ rorl $16, %ecx
+ movl 36(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 9 */
+ rorl $16, %edx
+ movl 40(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 10 */
+ rorl $16, %ecx
+ movl 44(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 11 */
+ rorl $16, %edx
+ movl 48(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 12 */
+ rorl $16, %ecx
+ movl 52(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 13 */
+ rorl $16, %edx
+ movl 56(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 14 */
+ rorl $16, %ecx
+ movl 60(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 15 */
+ rorl $16, %edx
+ movl 64(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+ xorl 68(%edi), %edx
+ movl 20(%esp), %eax
+ movl %edx, (%eax)
+ movl %ecx, 4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align ALIGN
+.L000start_decrypt:
+ xorl 68(%edi), %ecx
+
+ /* Round 16 */
+ rorl $16, %ecx
+ movl 64(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 15 */
+ rorl $16, %edx
+ movl 60(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 14 */
+ rorl $16, %ecx
+ movl 56(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 13 */
+ rorl $16, %edx
+ movl 52(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 12 */
+ rorl $16, %ecx
+ movl 48(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 11 */
+ rorl $16, %edx
+ movl 44(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 10 */
+ rorl $16, %ecx
+ movl 40(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 9 */
+ rorl $16, %edx
+ movl 36(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 8 */
+ rorl $16, %ecx
+ movl 32(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 7 */
+ rorl $16, %edx
+ movl 28(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 6 */
+ rorl $16, %ecx
+ movl 24(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 5 */
+ rorl $16, %edx
+ movl 20(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 4 */
+ rorl $16, %ecx
+ movl 16(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 3 */
+ rorl $16, %edx
+ movl 12(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+
+ /* Round 2 */
+ rorl $16, %ecx
+ movl 8(%edi), %esi
+ movb %ch, %al
+ movb %cl, %bl
+ rorl $16, %ecx
+ xorl %esi, %edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch, %al
+ movb %cl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %edx
+
+ /* Round 1 */
+ rorl $16, %edx
+ movl 4(%edi), %esi
+ movb %dh, %al
+ movb %dl, %bl
+ rorl $16, %edx
+ xorl %esi, %ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh, %al
+ movb %dl, %bl
+ addl %ebp, %esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax, %esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp, %esi
+ xorl %eax, %eax
+ xorl %esi, %ecx
+ xorl (%edi), %edx
+ movl 20(%esp), %eax
+ movl %edx, (%eax)
+ movl %ecx, 4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.BF_encrypt_end:
+ SIZE(BF_encrypt,.BF_encrypt_end-BF_encrypt)
+.ident "desasm.pl"
diff --git a/crypto/bf/asm/bx86unix.cpp b/crypto/bf/asm/bx86unix.cpp
new file mode 100644
index 0000000000..dcb10d23dd
--- /dev/null
+++ b/crypto/bf/asm/bx86unix.cpp
@@ -0,0 +1,33 @@
+
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#ifdef OUT
+#define OK 1
+#define BF_encrypt _BF_encrypt
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define BF_encrypt _BF_encrypt
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 16
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+#include "bx86-cpp.s"
+
diff --git a/crypto/bf/asm/readme b/crypto/bf/asm/readme
new file mode 100644
index 0000000000..71e4bb2d5d
--- /dev/null
+++ b/crypto/bf/asm/readme
@@ -0,0 +1,3 @@
+If you want more of an idea of how this all works,
+have a read of the readme file in SSLeay/crypto/des/asm.
+SSLeay can be found at ftp://ftp.psy.uq.oz.au/pub/Crypto/SSL.
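
Note the trick in bx86unix.cpp above: it is run through the C preprocessor,
not a C compiler. Its #defines pick TYPE/SIZE/ALIGN (and, for the a.out
targets, the leading-underscore symbol name) for the platform, and the
#include at the end pulls in bx86-cpp.s, so one generated assembler source
serves every Unix flavour; an unsupported configuration deliberately trips
the assembler on the plain-English text under #ifndef OK. A build rule in
this spirit (the exact invocation is an assumption, not taken from the
SSLeay makefiles) would be:

    cc -E -DELF bx86unix.cpp | as -o bx86-elf.o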
diff --git a/crypto/bf/asm/win32.asm b/crypto/bf/asm/win32.asm
new file mode 100644
index 0000000000..6d2333f323
--- /dev/null
+++ b/crypto/bf/asm/win32.asm
@@ -0,0 +1,663 @@
+ ; Don't even think of reading this code
+ ; It was automatically generated by bf586.pl
+ ; Which is a perl program used to generate the x86 assembler for
+ ; any of elf, a.out, Win32, or Solaris
+ ; It can be found in SSLeay 0.7.0+
+ ; eric <eay@cryptsoft.com>
+ ;
+ TITLE bfx86xxxx.asm
+ .386
+.model FLAT
+_TEXT SEGMENT
+PUBLIC _BF_encrypt
+EXTRN _des_SPtrans:DWORD
+_BF_encrypt PROC NEAR
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+ ; Load the 2 words
+ mov eax, DWORD PTR 20[esp]
+ mov ecx, DWORD PTR [eax]
+ mov edx, DWORD PTR 4[eax]
+ ;
+ ; P pointer, s and enc flag
+ mov edi, DWORD PTR 24[esp]
+ xor eax, eax
+ xor ebx, ebx
+ mov ebp, DWORD PTR 28[esp]
+ cmp ebp, 0
+ je $L000start_decrypt
+ xor ecx, DWORD PTR [edi]
+ ;
+ ; Round 0
+ ror ecx, 16
+ mov esi, DWORD PTR 4[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 1
+ ror edx, 16
+ mov esi, DWORD PTR 8[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 2
+ ror ecx, 16
+ mov esi, DWORD PTR 12[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 3
+ ror edx, 16
+ mov esi, DWORD PTR 16[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 4
+ ror ecx, 16
+ mov esi, DWORD PTR 20[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 5
+ ror edx, 16
+ mov esi, DWORD PTR 24[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 6
+ ror ecx, 16
+ mov esi, DWORD PTR 28[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 7
+ ror edx, 16
+ mov esi, DWORD PTR 32[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 8
+ ror ecx, 16
+ mov esi, DWORD PTR 36[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 9
+ ror edx, 16
+ mov esi, DWORD PTR 40[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 10
+ ror ecx, 16
+ mov esi, DWORD PTR 44[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 11
+ ror edx, 16
+ mov esi, DWORD PTR 48[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 12
+ ror ecx, 16
+ mov esi, DWORD PTR 52[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 13
+ ror edx, 16
+ mov esi, DWORD PTR 56[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 14
+ ror ecx, 16
+ mov esi, DWORD PTR 60[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 15
+ ror edx, 16
+ mov esi, DWORD PTR 64[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ xor edx, DWORD PTR 68[edi]
+ mov eax, DWORD PTR 20[esp]
+ mov DWORD PTR [eax],edx
+ mov DWORD PTR 4[eax],ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+$L000start_decrypt:
+ xor ecx, DWORD PTR 68[edi]
+ ;
+ ; Round 16
+ ror ecx, 16
+ mov esi, DWORD PTR 64[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 15
+ ror edx, 16
+ mov esi, DWORD PTR 60[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 14
+ ror ecx, 16
+ mov esi, DWORD PTR 56[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 13
+ ror edx, 16
+ mov esi, DWORD PTR 52[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 12
+ ror ecx, 16
+ mov esi, DWORD PTR 48[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 11
+ ror edx, 16
+ mov esi, DWORD PTR 44[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 10
+ ror ecx, 16
+ mov esi, DWORD PTR 40[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 9
+ ror edx, 16
+ mov esi, DWORD PTR 36[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 8
+ ror ecx, 16
+ mov esi, DWORD PTR 32[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 7
+ ror edx, 16
+ mov esi, DWORD PTR 28[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 6
+ ror ecx, 16
+ mov esi, DWORD PTR 24[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 5
+ ror edx, 16
+ mov esi, DWORD PTR 20[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 4
+ ror ecx, 16
+ mov esi, DWORD PTR 16[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 3
+ ror edx, 16
+ mov esi, DWORD PTR 12[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ ;
+ ; Round 2
+ ror ecx, 16
+ mov esi, DWORD PTR 8[edi]
+ mov al, ch
+ mov bl, cl
+ ror ecx, 16
+ xor edx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, ch
+ mov bl, cl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor edx, esi
+ ;
+ ; Round 1
+ ror edx, 16
+ mov esi, DWORD PTR 4[edi]
+ mov al, dh
+ mov bl, dl
+ ror edx, 16
+ xor ecx, esi
+ mov esi, DWORD PTR 72[eax*4+edi]
+ mov ebp, DWORD PTR 1096[ebx*4+edi]
+ mov al, dh
+ mov bl, dl
+ add esi, ebp
+ mov eax, DWORD PTR 2120[eax*4+edi]
+ xor esi, eax
+ mov ebp, DWORD PTR 3144[ebx*4+edi]
+ add esi, ebp
+ xor eax, eax
+ xor ecx, esi
+ xor edx, DWORD PTR [edi]
+ mov eax, DWORD PTR 20[esp]
+ mov DWORD PTR [eax],edx
+ mov DWORD PTR 4[eax],ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_BF_encrypt ENDP
+_TEXT ENDS
+END
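
Note that win32.asm and b-win32.asm are otherwise identical listings;
win32.asm just carries an extra EXTRN _des_SPtrans:DWORD declaration (hence
663 lines against 662 in the diffstat), apparently left over from the DES
version of the generator, since _des_SPtrans is never referenced by the
Blowfish code.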