Remove obsolete files from SSLeay 0.8.

Ulf Möller
1999-04-06 15:22:55 +00:00
parent 121bd68d1c
commit e8d628156f
30 changed files with 0 additions and 5366 deletions


@@ -1,666 +0,0 @@
/* Don't even think of reading this code */
/* It was automatically generated by bf586.pl */
/* Which is a perl program used to generate the x86 assembler for */
/* any of elf, a.out, Win32, or Solaris */
/* It can be found in SSLeay 0.7.0+ */
/* eric <eay@cryptsoft.com> */
.file "bfx86xxxx.s"
.version "01.01"
gcc2_compiled.:
.text
.align ALIGN
.globl BF_encrypt
TYPE(BF_encrypt,@function)
BF_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
/* Load the 2 words */
movl 20(%esp), %eax
movl (%eax), %ecx
movl 4(%eax), %edx
/* P pointer, s and enc flag */
movl 24(%esp), %edi
xorl %eax, %eax
xorl %ebx, %ebx
movl 28(%esp), %ebp
cmpl $0, %ebp
je .L000start_decrypt
xorl (%edi), %ecx
/* Round 0 */
rorl $16, %ecx
movl 4(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 1 */
rorl $16, %edx
movl 8(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 2 */
rorl $16, %ecx
movl 12(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 3 */
rorl $16, %edx
movl 16(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 4 */
rorl $16, %ecx
movl 20(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 5 */
rorl $16, %edx
movl 24(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 6 */
rorl $16, %ecx
movl 28(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 7 */
rorl $16, %edx
movl 32(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 8 */
rorl $16, %ecx
movl 36(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 9 */
rorl $16, %edx
movl 40(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 10 */
rorl $16, %ecx
movl 44(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 11 */
rorl $16, %edx
movl 48(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 12 */
rorl $16, %ecx
movl 52(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 13 */
rorl $16, %edx
movl 56(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 14 */
rorl $16, %ecx
movl 60(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 15 */
rorl $16, %edx
movl 64(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
xorl 68(%edi), %edx
movl 20(%esp), %eax
movl %edx, (%eax)
movl %ecx, 4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align ALIGN
.L000start_decrypt:
xorl 68(%edi), %ecx
/* Round 16 */
rorl $16, %ecx
movl 64(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 15 */
rorl $16, %edx
movl 60(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 14 */
rorl $16, %ecx
movl 56(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 13 */
rorl $16, %edx
movl 52(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 12 */
rorl $16, %ecx
movl 48(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 11 */
rorl $16, %edx
movl 44(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 10 */
rorl $16, %ecx
movl 40(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 9 */
rorl $16, %edx
movl 36(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 8 */
rorl $16, %ecx
movl 32(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 7 */
rorl $16, %edx
movl 28(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 6 */
rorl $16, %ecx
movl 24(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 5 */
rorl $16, %edx
movl 20(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 4 */
rorl $16, %ecx
movl 16(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 3 */
rorl $16, %edx
movl 12(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
/* Round 2 */
rorl $16, %ecx
movl 8(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
/* Round 1 */
rorl $16, %edx
movl 4(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
xorl (%edi), %edx
movl 20(%esp), %eax
movl %edx, (%eax)
movl %ecx, 4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.BF_encrypt_end:
SIZE(BF_encrypt,.BF_encrypt_end-BF_encrypt)
.ident "desasm.pl"
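
The two deleted files are the Unix (GNU as) and Win32 (MASM) renderings of the same bf586.pl output. For orientation, here is a minimal C sketch (a paraphrase, not the SSLeay source; all names are illustrative) of what each unrolled "Round n" block above computes. The magic constants in the indexed loads follow from the key-schedule layout: the P array is 18 dwords (72 bytes), so the four 256-entry S-boxes sit at byte offsets 72, 1096, 2120, and 3144 from the key base. The stack offsets 20/24/28(%esp) (four saved registers plus the return address) appear to carry the data block, the key schedule, and the encrypt/decrypt flag.

#include <stdint.h>

/* Hypothetical key layout matching the offsets used above; field names
 * are illustrative, not the SSLeay declarations. */
typedef struct {
    uint32_t P[18];     /* subkeys: 18 * 4 = 72 bytes, so S-boxes start at 72 */
    uint32_t S[4][256]; /* byte offsets 72, 1096, 2120, 3144 from the key base */
} bf_key_sketch;

/* Blowfish F function: ((S0[a] + S1[b]) ^ S2[c]) + S3[d], where a..d are
 * the bytes of x from most to least significant -- the add/xor/add chain
 * of table loads seen in each round above. */
static uint32_t bf_f(const bf_key_sketch *k, uint32_t x)
{
    return ((k->S[0][(x >> 24) & 0xff] + k->S[1][(x >> 16) & 0xff])
            ^ k->S[2][(x >> 8) & 0xff]) + k->S[3][x & 0xff];
}

/* Encryption path (the fall-through before .L000start_decrypt): P[0] is
 * folded into the left half, each round XORs P[i] and F of the other half
 * into its target, P[17] whitens the right half, and the halves are
 * stored swapped. */
static void bf_encrypt_sketch(uint32_t data[2], const bf_key_sketch *k)
{
    uint32_t l = data[0], r = data[1];

    l ^= k->P[0];
    for (int i = 1; i <= 16; i += 2) { /* fully unrolled in the assembly */
        r ^= bf_f(k, l) ^ k->P[i];
        l ^= bf_f(k, r) ^ k->P[i + 1];
    }
    r ^= k->P[17];
    data[0] = r;
    data[1] = l;
}
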


@@ -1,663 +0,0 @@
; Don't even think of reading this code
; It was automatically generated by bf586.pl
; Which is a perl program used to generate the x86 assembler for
; any of elf, a.out, Win32, or Solaris
; It can be found in SSLeay 0.7.0+
; eric <eay@cryptsoft.com>
;
TITLE bfx86xxxx.asm
.386
.model FLAT
_TEXT SEGMENT
PUBLIC _BF_encrypt
EXTRN _des_SPtrans:DWORD
_BF_encrypt PROC NEAR
push ebp
push ebx
push esi
push edi
;
; Load the 2 words
mov eax, DWORD PTR 20[esp]
mov ecx, DWORD PTR [eax]
mov edx, DWORD PTR 4[eax]
;
; P pointer, s and enc flag
mov edi, DWORD PTR 24[esp]
xor eax, eax
xor ebx, ebx
mov ebp, DWORD PTR 28[esp]
cmp ebp, 0
je $L000start_decrypt
xor ecx, DWORD PTR [edi]
;
; Round 0
ror ecx, 16
mov esi, DWORD PTR 4[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 1
ror edx, 16
mov esi, DWORD PTR 8[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 2
ror ecx, 16
mov esi, DWORD PTR 12[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 3
ror edx, 16
mov esi, DWORD PTR 16[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 4
ror ecx, 16
mov esi, DWORD PTR 20[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 5
ror edx, 16
mov esi, DWORD PTR 24[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 6
ror ecx, 16
mov esi, DWORD PTR 28[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 7
ror edx, 16
mov esi, DWORD PTR 32[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 8
ror ecx, 16
mov esi, DWORD PTR 36[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 9
ror edx, 16
mov esi, DWORD PTR 40[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 10
ror ecx, 16
mov esi, DWORD PTR 44[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 11
ror edx, 16
mov esi, DWORD PTR 48[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 12
ror ecx, 16
mov esi, DWORD PTR 52[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 13
ror edx, 16
mov esi, DWORD PTR 56[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 14
ror ecx, 16
mov esi, DWORD PTR 60[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 15
ror edx, 16
mov esi, DWORD PTR 64[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
xor edx, DWORD PTR 68[edi]
mov eax, DWORD PTR 20[esp]
mov DWORD PTR [eax],edx
mov DWORD PTR 4[eax],ecx
pop edi
pop esi
pop ebx
pop ebp
ret
$L000start_decrypt:
xor ecx, DWORD PTR 68[edi]
;
; Round 16
ror ecx, 16
mov esi, DWORD PTR 64[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 15
ror edx, 16
mov esi, DWORD PTR 60[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 14
ror ecx, 16
mov esi, DWORD PTR 56[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 13
ror edx, 16
mov esi, DWORD PTR 52[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 12
ror ecx, 16
mov esi, DWORD PTR 48[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 11
ror edx, 16
mov esi, DWORD PTR 44[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 10
ror ecx, 16
mov esi, DWORD PTR 40[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 9
ror edx, 16
mov esi, DWORD PTR 36[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 8
ror ecx, 16
mov esi, DWORD PTR 32[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 7
ror edx, 16
mov esi, DWORD PTR 28[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 6
ror ecx, 16
mov esi, DWORD PTR 24[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 5
ror edx, 16
mov esi, DWORD PTR 20[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 4
ror ecx, 16
mov esi, DWORD PTR 16[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 3
ror edx, 16
mov esi, DWORD PTR 12[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
;
; Round 2
ror ecx, 16
mov esi, DWORD PTR 8[edi]
mov al, ch
mov bl, cl
ror ecx, 16
xor edx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, ch
mov bl, cl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor edx, esi
;
; Round 1
ror edx, 16
mov esi, DWORD PTR 4[edi]
mov al, dh
mov bl, dl
ror edx, 16
xor ecx, esi
mov esi, DWORD PTR 72[eax*4+edi]
mov ebp, DWORD PTR 1096[ebx*4+edi]
mov al, dh
mov bl, dl
add esi, ebp
mov eax, DWORD PTR 2120[eax*4+edi]
xor esi, eax
mov ebp, DWORD PTR 3144[ebx*4+edi]
add esi, ebp
xor eax, eax
xor ecx, esi
xor edx, DWORD PTR [edi]
mov eax, DWORD PTR 20[esp]
mov DWORD PTR [eax],edx
mov DWORD PTR 4[eax],ecx
pop edi
pop esi
pop ebx
pop ebp
ret
_BF_encrypt ENDP
_TEXT ENDS
END
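
The $L000start_decrypt path (and its GNU-as twin above) is the same round network run backwards: the subkeys are simply applied in reverse order, from P[17] down to P[0]. Continuing the hedged sketch above, with the same hypothetical bf_key_sketch and bf_f:

/* Decryption path: P[17] is folded in first, the rounds consume P[16]
 * down to P[1], P[0] whitens last; output halves are again stored
 * swapped, undoing the swap made by the encryption path. */
static void bf_decrypt_sketch(uint32_t data[2], const bf_key_sketch *k)
{
    uint32_t l = data[0], r = data[1];

    l ^= k->P[17];
    for (int i = 16; i >= 2; i -= 2) { /* fully unrolled in the assembly */
        r ^= bf_f(k, l) ^ k->P[i];
        l ^= bf_f(k, r) ^ k->P[i - 1];
    }
    r ^= k->P[0];
    data[0] = r;
    data[1] = l;
}
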