/* Run the C pre-processor over this file with one of the following defined
 * ELF - elf object files,
 * OUT - a.out object files,
 * BSDI - BSDI style a.out object files
 * SOL - Solaris style elf
 */
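/* An illustrative way to do that (not taken from the original build files;
 * the exact flags may differ) is to feed the file through cpp and then the
 * assembler, e.g.
 *
 *	cpp -DELF des-586.s | as -o des-586.o
 *
 * substituting -DOUT, -DBSDI or -DSOL for the other object formats.
 */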
#define TYPE(a,b) .type a,b
#define SIZE(a,b) .size a,b

#if defined(OUT) || defined(BSDI)
#define des_SPtrans _des_SPtrans
#define des_encrypt _des_encrypt
#define des_encrypt2 _des_encrypt2
#define des_encrypt3 _des_encrypt3
#define des_decrypt3 _des_decrypt3
#define des_ncbc_encrypt _des_ncbc_encrypt
#define des_ede3_cbc_encrypt _des_ede3_cbc_encrypt
#endif
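/* a.out object formats prefix C symbol names with an underscore, so the
 * routines are aliased to their underscored names above; a C caller of
 * des_encrypt() then resolves to the assembler symbol _des_encrypt on
 * OUT/BSDI systems.
 */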
#ifdef OUT
#define OK 1
#define ALIGN 4
#endif

#ifdef BSDI
#define OK 1
#define ALIGN 4
#undef SIZE
#undef TYPE
#define SIZE(a,b)
#define TYPE(a,b)
#endif

#if defined(ELF) || defined(SOL)
#define OK 1
#define ALIGN 16
#endif
#ifndef OK
You need to define one of
ELF - elf systems - linux-elf, NetBSD and DG-UX
OUT - a.out systems - linux-a.out and FreeBSD
SOL - solaris systems, which are elf with strange comment lines
BSDI - a.out with a very primitive version of as.
#endif
/* Let the Assembler begin :-) */
/* Don't even think of reading this code */
/* It was automatically generated by des-586.pl */
/* Which is a perl program used to generate the x86 assembler for */
/* any of elf, a.out, BSDI, Win32, or Solaris */
/* eric <eay@cryptsoft.com> */
.file "des-586.s"
.version "01.01"
gcc2_compiled.:
.text
.align ALIGN
.globl des_encrypt
TYPE(des_encrypt,@function)
des_encrypt:
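/* A sketch of the C-level interface this routine appears to implement
 * (assumed from the usual libdes/SSLeay declaration, not stated in this
 * generated file):
 *
 *     void des_encrypt(unsigned long *data, des_key_schedule ks, int enc);
 *
 * The code below picks the data pointer up into %esi, the key schedule into
 * %ebp and the enc flag into %ebx; enc == 0 branches to .L000start_decrypt,
 * which runs the rounds with the key words in reverse order.  data[0] and
 * data[1] are loaded into %eax/%edi, permuted, put through 16 rounds and
 * written back in place at the end.
 */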
pushl %esi
pushl %edi

/* Load the 2 words */
movl 12(%esp), %esi
xorl %ecx, %ecx
pushl %ebx
pushl %ebp
movl (%esi), %eax
movl 28(%esp), %ebx
movl 4(%esi), %edi
/* IP */
roll $4, %eax
movl %eax, %esi
xorl %edi, %eax
andl $0xf0f0f0f0, %eax
xorl %eax, %esi
xorl %eax, %edi
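/* Each six-instruction group in this IP block is one masked bit swap between
 * the two halves.  A rough C equivalent of the step above (illustrative only;
 * ROTL stands for a 32-bit left rotate, the variable names are invented):
 *
 *     a = ROTL(a, 4);                 // roll $4, %eax
 *     t = (a ^ b) & 0xf0f0f0f0;       // bits to exchange
 *     a ^= t;                         // new a, kept in %esi
 *     b ^= t;                         // %edi
 *
 * The remaining steps repeat the same pattern with different rotate counts
 * and masks to complete the DES initial permutation.
 */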
roll $20, %edi
movl %edi, %eax
xorl %esi, %edi
andl $0xfff0000f, %edi
xorl %edi, %eax
xorl %edi, %esi

roll $14, %eax
movl %eax, %edi
xorl %esi, %eax
andl $0x33333333, %eax
xorl %eax, %edi
xorl %eax, %esi

roll $22, %esi
movl %esi, %eax
xorl %edi, %esi
andl $0x03fc03fc, %esi
xorl %esi, %eax
xorl %esi, %edi

roll $9, %eax
movl %eax, %esi
xorl %edi, %eax
andl $0xaaaaaaaa, %eax
xorl %eax, %esi
xorl %eax, %edi

.byte 209
.byte 199 /* roll $1 %edi (raw opcode bytes 0xD1 0xC7) */
movl 24(%esp), %ebp
cmpl $0, %ebx
je .L000start_decrypt

/* Round 0 */
movl (%ebp), %eax
xorl %ebx, %ebx
movl 4(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
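/* The block above is one Feistel round: the active half is XORed with two
 * round-key words, the result is carved into eight 6-bit groups through the
 * byte registers, and each group indexes one of the eight combined S/P tables
 * in des_SPtrans (spaced 0x100 bytes apart); the eight table words are XORed
 * into the other half.  Successive rounds swap the roles of %esi and %edi.
 * A loose C sketch of one round (illustrative only; SP[i] stands for the
 * i-th 64-entry table of des_SPtrans, k for this round's two key words,
 * ROTR for a 32-bit right rotate):
 *
 *     u = (R ^ k[0]) & 0xfcfcfcfc;
 *     t = ROTR((R ^ k[1]) & 0xcfcfcfcf, 4);
 *     L ^= SP[0][(u >>  2) & 0x3f] ^ SP[2][(u >> 10) & 0x3f] ^
 *          SP[4][(u >> 18) & 0x3f] ^ SP[6][(u >> 26) & 0x3f] ^
 *          SP[1][(t >>  2) & 0x3f] ^ SP[3][(t >> 10) & 0x3f] ^
 *          SP[5][(t >> 18) & 0x3f] ^ SP[7][(t >> 26) & 0x3f];
 */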
|
|
|
|
|
|
|
|
/* Round 1 */
|
|
|
|
movl 8(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 12(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 2 */
|
|
|
|
movl 16(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 20(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 3 */
|
|
|
|
movl 24(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 28(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 4 */
|
|
|
|
movl 32(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 36(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 5 */
|
|
|
|
movl 40(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 44(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 6 */
|
|
|
|
movl 48(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 52(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 7 */
|
|
|
|
movl 56(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 60(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 8 */
|
|
|
|
movl 64(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 68(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 9 */
|
|
|
|
movl 72(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 76(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 10 */
|
|
|
|
movl 80(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 84(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 11 */
|
|
|
|
movl 88(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 92(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 12 */
|
|
|
|
movl 96(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 100(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 13 */
|
|
|
|
movl 104(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 108(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 14 */
|
|
|
|
movl 112(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 116(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 15 */
|
|
|
|
movl 120(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 124(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
jmp .L001end
|
|
|
|
.L000start_decrypt:
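/* Decryption path: the round bodies are identical, but the key schedule is
 * walked from round 15 down to round 0. */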
|
|
|
|
|
|
|
|
/* Round 15 */
|
|
|
|
movl 120(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 124(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 14 */
|
|
|
|
movl 112(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 116(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 13 */
|
|
|
|
movl 104(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 108(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 12 */
|
|
|
|
movl 96(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 100(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 11 */
|
|
|
|
movl 88(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 92(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 10 */
|
|
|
|
movl 80(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 84(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 9 */
|
|
|
|
movl 72(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 76(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 8 */
|
|
|
|
movl 64(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 68(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 7 */
|
|
|
|
movl 56(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 60(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 6 */
|
|
|
|
movl 48(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 52(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 5 */
|
|
|
|
movl 40(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 44(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 4 */
|
|
|
|
movl 32(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 36(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 3 */
|
|
|
|
movl 24(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 28(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 2 */
|
|
|
|
movl 16(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 20(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 1 */
|
|
|
|
movl 8(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 12(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 0 */
|
|
|
|
movl (%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 4(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
.L001end:
|
|
|
|
|
|
|
|
/* FP */
|
|
|
|
movl 20(%esp), %edx
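/* %edx reloads the caller's data pointer (the first argument, 20(%esp) with
 * four registers still pushed); after the inverse permutation below the two
 * halves are stored back through it, so the block is transformed in place. */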
|
|
|
|
.byte 209
.byte 206 /* rorl $1 %esi (raw opcode bytes 0xD1 0xCE) */
|
|
|
|
movl %edi, %eax
|
|
|
|
xorl %esi, %edi
|
|
|
|
andl $0xaaaaaaaa, %edi
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %esi
|
|
|
|
|
|
|
|
roll $23, %eax
|
|
|
|
movl %eax, %edi
|
|
|
|
xorl %esi, %eax
|
|
|
|
andl $0x03fc03fc, %eax
|
|
|
|
xorl %eax, %edi
|
|
|
|
xorl %eax, %esi
|
|
|
|
|
|
|
|
roll $10, %edi
|
|
|
|
movl %edi, %eax
|
|
|
|
xorl %esi, %edi
|
|
|
|
andl $0x33333333, %edi
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %esi
|
|
|
|
|
|
|
|
roll $18, %esi
|
|
|
|
movl %esi, %edi
|
|
|
|
xorl %eax, %esi
|
|
|
|
andl $0xfff0000f, %esi
|
|
|
|
xorl %esi, %edi
|
|
|
|
xorl %esi, %eax
|
|
|
|
|
|
|
|
roll $12, %edi
|
|
|
|
movl %edi, %esi
|
|
|
|
xorl %eax, %edi
|
|
|
|
andl $0xf0f0f0f0, %edi
|
|
|
|
xorl %edi, %esi
|
|
|
|
xorl %edi, %eax
|
|
|
|
|
|
|
|
rorl $4, %eax
|
|
|
|
movl %eax, (%edx)
|
|
|
|
movl %esi, 4(%edx)
|
|
|
|
popl %ebp
|
|
|
|
popl %ebx
|
|
|
|
popl %edi
|
|
|
|
popl %esi
|
|
|
|
ret
|
|
|
|
.des_encrypt_end:
|
|
|
|
SIZE(des_encrypt,.des_encrypt_end-des_encrypt)
|
|
|
|
.ident "desasm.pl"
|
|
|
|
.text
|
|
|
|
.align ALIGN
|
|
|
|
.globl des_encrypt2
|
|
|
|
TYPE(des_encrypt2,@function)
|
|
|
|
des_encrypt2:
|
|
|
|
pushl %esi
|
|
|
|
pushl %edi
|
|
|
|
|
|
|
|
/* Load the 2 words */
|
|
|
|
movl 12(%esp), %eax
|
|
|
|
xorl %ecx, %ecx
|
|
|
|
pushl %ebx
|
|
|
|
pushl %ebp
|
|
|
|
movl (%eax), %esi
|
|
|
|
movl 28(%esp), %ebx
|
|
|
|
roll $3, %esi
|
|
|
|
movl 4(%eax), %edi
|
|
|
|
roll $3, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
cmpl $0, %ebx
|
|
|
|
je .L002start_decrypt
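/* Unlike des_encrypt above, des_encrypt2 performs no initial or final
 * permutation here: it rotates both halves left by 3 and goes straight into
 * the 16 rounds.  Presumably it serves as the inner primitive for the
 * triple-DES entry points whose names are remapped at the top of this file
 * (des_encrypt3/des_decrypt3), which handle the permutations themselves. */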
|
|
|
|
|
|
|
|
/* Round 0 */
|
|
|
|
movl (%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 4(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 1 */
|
|
|
|
movl 8(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 12(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 2 */
|
|
|
|
movl 16(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 20(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 3 */
|
|
|
|
movl 24(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 28(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 4 */
|
|
|
|
movl 32(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 36(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 5 */
|
|
|
|
movl 40(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 44(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 6 */
|
|
|
|
movl 48(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 52(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 7 */
|
|
|
|
movl 56(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 60(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 8 */
|
|
|
|
movl 64(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 68(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 9 */
|
|
|
|
movl 72(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 76(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 10 */
|
|
|
|
movl 80(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 84(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 11 */
|
|
|
|
movl 88(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 92(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 12 */
|
|
|
|
movl 96(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 100(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 13 */
|
|
|
|
movl 104(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 108(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 14 */
|
|
|
|
movl 112(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 116(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 15 */
|
|
|
|
movl 120(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 124(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
jmp .L003end
|
|
|
|
.L002start_decrypt:
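/* Decryption path for des_encrypt2: same rounds, with the key words taken
 * from round 15 down to round 0. */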
|
|
|
|
|
|
|
|
/* Round 15 */
|
|
|
|
movl 120(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 124(%ebp), %edx
|
|
|
|
xorl %esi, %eax
|
|
|
|
xorl %esi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %edi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %edi
|
|
|
|
|
|
|
|
/* Round 14 */
|
|
|
|
movl 112(%ebp), %eax
|
|
|
|
xorl %ebx, %ebx
|
|
|
|
movl 116(%ebp), %edx
|
|
|
|
xorl %edi, %eax
|
|
|
|
xorl %edi, %edx
|
|
|
|
andl $0xfcfcfcfc, %eax
|
|
|
|
andl $0xcfcfcfcf, %edx
|
|
|
|
movb %al, %bl
|
|
|
|
movb %ah, %cl
|
|
|
|
rorl $4, %edx
|
|
|
|
movl des_SPtrans(%ebx),%ebp
|
|
|
|
movb %dl, %bl
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 0x200+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %dh, %cl
|
|
|
|
shrl $16, %eax
|
|
|
|
movl 0x100+des_SPtrans(%ebx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movb %ah, %bl
|
|
|
|
shrl $16, %edx
|
|
|
|
movl 0x300+des_SPtrans(%ecx),%ebp
|
|
|
|
xorl %ebp, %esi
|
|
|
|
movl 24(%esp), %ebp
|
|
|
|
movb %dh, %cl
|
|
|
|
andl $0xff, %eax
|
|
|
|
andl $0xff, %edx
|
|
|
|
movl 0x600+des_SPtrans(%ebx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x700+des_SPtrans(%ecx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x400+des_SPtrans(%eax),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
movl 0x500+des_SPtrans(%edx),%ebx
|
|
|
|
xorl %ebx, %esi
|
|
|
|
|
|
|
|
/* Round 13 */
movl 104(%ebp), %eax
xorl %ebx, %ebx
movl 108(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 12 */
movl 96(%ebp), %eax
xorl %ebx, %ebx
movl 100(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
/* Round 11 */
movl 88(%ebp), %eax
xorl %ebx, %ebx
movl 92(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 10 */
movl 80(%ebp), %eax
xorl %ebx, %ebx
movl 84(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
/* Round 9 */
movl 72(%ebp), %eax
xorl %ebx, %ebx
movl 76(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 8 */
movl 64(%ebp), %eax
xorl %ebx, %ebx
movl 68(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
/* Round 7 */
movl 56(%ebp), %eax
xorl %ebx, %ebx
movl 60(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 6 */
movl 48(%ebp), %eax
xorl %ebx, %ebx
movl 52(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
/* Round 5 */
movl 40(%ebp), %eax
xorl %ebx, %ebx
movl 44(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 4 */
movl 32(%ebp), %eax
xorl %ebx, %ebx
movl 36(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
/* Round 3 */
movl 24(%ebp), %eax
xorl %ebx, %ebx
movl 28(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 2 */
movl 16(%ebp), %eax
xorl %ebx, %ebx
movl 20(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
/* Round 1 */
movl 8(%ebp), %eax
xorl %ebx, %ebx
movl 12(%ebp), %edx
xorl %esi, %eax
xorl %esi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
/* Round 0 */
movl (%ebp), %eax
xorl %ebx, %ebx
movl 4(%ebp), %edx
xorl %edi, %eax
xorl %edi, %edx
andl $0xfcfcfcfc, %eax
andl $0xcfcfcfcf, %edx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 24(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
.L003end:

/* Fixup */
rorl $3, %edi
movl 20(%esp), %eax
rorl $3, %esi
movl %edi, (%eax)
movl %esi, 4(%eax)
popl %ebp
popl %ebx
popl %edi
popl %esi
ret
.des_encrypt2_end:
SIZE(des_encrypt2,.des_encrypt2_end-des_encrypt2)
.ident "desasm.pl"
.text
.align ALIGN
.globl des_encrypt3
TYPE(des_encrypt3,@function)
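/* des_encrypt3: triple-DES on one 8-byte block.  The block goes through
 * IP once, then des_encrypt2 is called three times with the encrypt
 * flag $1/$0/$1 and the three key schedules passed as arguments 2-4
 * (encrypt with ks1, decrypt with ks2, encrypt with ks3), and FP is
 * applied once at the end.  Roughly, as an illustrative C sketch only:
 *
 *   IP(l, r);
 *   des_encrypt2(data, ks1, 1);
 *   des_encrypt2(data, ks2, 0);
 *   des_encrypt2(data, ks3, 1);
 *   FP(r, l);
 */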
des_encrypt3:
pushl %ebx
movl 8(%esp), %ebx
pushl %ebp
pushl %esi
pushl %edi

/* Load the data words */
movl (%ebx), %edi
movl 4(%ebx), %esi
subl $12, %esp

/* IP */
roll $4, %edi
movl %edi, %edx
xorl %esi, %edi
andl $0xf0f0f0f0, %edi
xorl %edi, %edx
xorl %edi, %esi

roll $20, %esi
movl %esi, %edi
xorl %edx, %esi
andl $0xfff0000f, %esi
xorl %esi, %edi
xorl %esi, %edx

roll $14, %edi
movl %edi, %esi
xorl %edx, %edi
andl $0x33333333, %edi
xorl %edi, %esi
xorl %edi, %edx

roll $22, %edx
movl %edx, %edi
xorl %esi, %edx
andl $0x03fc03fc, %edx
xorl %edx, %edi
xorl %edx, %esi

roll $9, %edi
movl %edi, %edx
xorl %esi, %edi
andl $0xaaaaaaaa, %edi
xorl %edi, %edx
xorl %edi, %esi

rorl $3, %edx
rorl $2, %esi
movl %esi, 4(%ebx)
movl 36(%esp), %eax
movl %edx, (%ebx)
movl 40(%esp), %edi
movl 44(%esp), %esi
movl $1, 8(%esp)
movl %eax, 4(%esp)
movl %ebx, (%esp)
call des_encrypt2
movl $0, 8(%esp)
movl %edi, 4(%esp)
movl %ebx, (%esp)
call des_encrypt2
movl $1, 8(%esp)
movl %esi, 4(%esp)
movl %ebx, (%esp)
call des_encrypt2
addl $12, %esp
movl (%ebx), %edi
movl 4(%ebx), %esi

/* FP */
roll $2, %esi
roll $3, %edi
movl %edi, %eax
xorl %esi, %edi
andl $0xaaaaaaaa, %edi
xorl %edi, %eax
xorl %edi, %esi

roll $23, %eax
movl %eax, %edi
xorl %esi, %eax
andl $0x03fc03fc, %eax
xorl %eax, %edi
xorl %eax, %esi

roll $10, %edi
movl %edi, %eax
xorl %esi, %edi
andl $0x33333333, %edi
xorl %edi, %eax
xorl %edi, %esi

roll $18, %esi
movl %esi, %edi
xorl %eax, %esi
andl $0xfff0000f, %esi
xorl %esi, %edi
xorl %esi, %eax

roll $12, %edi
movl %edi, %esi
xorl %eax, %edi
andl $0xf0f0f0f0, %edi
xorl %edi, %esi
xorl %edi, %eax

rorl $4, %eax
movl %eax, (%ebx)
movl %esi, 4(%ebx)
popl %edi
popl %esi
popl %ebp
popl %ebx
ret
.des_encrypt3_end:
SIZE(des_encrypt3,.des_encrypt3_end-des_encrypt3)
.ident "desasm.pl"
.text
.align ALIGN
.globl des_decrypt3
TYPE(des_decrypt3,@function)
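/* des_decrypt3: inverse of des_encrypt3.  Same IP/FP wrapper, but the
 * three des_encrypt2 calls take the key schedules in the opposite order
 * with the encrypt flag inverted ($0 with ks3, $1 with ks2, $0 with
 * ks1), undoing the encrypt-decrypt-encrypt sequence above.
 */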
des_decrypt3:
pushl %ebx
movl 8(%esp), %ebx
pushl %ebp
pushl %esi
pushl %edi

/* Load the data words */
movl (%ebx), %edi
movl 4(%ebx), %esi
subl $12, %esp

/* IP */
roll $4, %edi
movl %edi, %edx
xorl %esi, %edi
andl $0xf0f0f0f0, %edi
xorl %edi, %edx
xorl %edi, %esi

roll $20, %esi
movl %esi, %edi
xorl %edx, %esi
andl $0xfff0000f, %esi
xorl %esi, %edi
xorl %esi, %edx

roll $14, %edi
movl %edi, %esi
xorl %edx, %edi
andl $0x33333333, %edi
xorl %edi, %esi
xorl %edi, %edx

roll $22, %edx
movl %edx, %edi
xorl %esi, %edx
andl $0x03fc03fc, %edx
xorl %edx, %edi
xorl %edx, %esi

roll $9, %edi
movl %edi, %edx
xorl %esi, %edi
andl $0xaaaaaaaa, %edi
xorl %edi, %edx
xorl %edi, %esi

rorl $3, %edx
rorl $2, %esi
movl %esi, 4(%ebx)
movl 36(%esp), %esi
movl %edx, (%ebx)
movl 40(%esp), %edi
movl 44(%esp), %eax
movl $0, 8(%esp)
movl %eax, 4(%esp)
movl %ebx, (%esp)
call des_encrypt2
movl $1, 8(%esp)
movl %edi, 4(%esp)
movl %ebx, (%esp)
call des_encrypt2
movl $0, 8(%esp)
movl %esi, 4(%esp)
movl %ebx, (%esp)
call des_encrypt2
addl $12, %esp
movl (%ebx), %edi
movl 4(%ebx), %esi

/* FP */
roll $2, %esi
roll $3, %edi
movl %edi, %eax
xorl %esi, %edi
andl $0xaaaaaaaa, %edi
xorl %edi, %eax
xorl %edi, %esi

roll $23, %eax
movl %eax, %edi
xorl %esi, %eax
andl $0x03fc03fc, %eax
xorl %eax, %edi
xorl %eax, %esi

roll $10, %edi
movl %edi, %eax
xorl %esi, %edi
andl $0x33333333, %edi
xorl %edi, %eax
xorl %edi, %esi

roll $18, %esi
movl %esi, %edi
xorl %eax, %esi
andl $0xfff0000f, %esi
xorl %esi, %edi
xorl %esi, %eax

roll $12, %edi
movl %edi, %esi
xorl %eax, %edi
andl $0xf0f0f0f0, %edi
xorl %edi, %esi
xorl %edi, %eax

rorl $4, %eax
movl %eax, (%ebx)
movl %esi, 4(%ebx)
popl %edi
popl %esi
popl %ebp
popl %ebx
ret
.des_decrypt3_end:
SIZE(des_decrypt3,.des_decrypt3_end-des_decrypt3)
.ident "desasm.pl"
.text
.align ALIGN
.globl des_ncbc_encrypt
TYPE(des_ncbc_encrypt,@function)
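/* des_ncbc_encrypt: single-DES CBC.  The IV is copied onto the stack,
 * whole 8-byte blocks are processed in the main loop, and a jump table
 * (.L008cbc_enc_jmp_table) handles a trailing 1-7 byte fragment.  The
 * chaining is the usual CBC pattern; roughly, as an illustrative C
 * sketch (not the generated code):
 *
 *   for (each 8-byte block) {
 *       if (encrypt) { iv ^= in;  des_encrypt(&iv, ks, 1);  out = iv; }
 *       else { t = in;  des_encrypt(&t, ks, 0);  out = t ^ iv;  iv = in; }
 *   }
 */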
des_ncbc_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp), %ebp
/* getting iv ptr from parameter 4 */
movl 36(%esp), %ebx
movl (%ebx), %esi
movl 4(%ebx), %edi
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp, %ebx
movl 36(%esp), %esi
movl 40(%esp), %edi
/* getting encrypt flag from parameter 5 */
movl 56(%esp), %ecx
/* get and push parameter 5 */
pushl %ecx
/* get and push parameter 3 */
movl 52(%esp), %eax
pushl %eax
pushl %ebx
cmpl $0, %ecx
jz .L004decrypt
andl $4294967288, %ebp
movl 12(%esp), %eax
movl 16(%esp), %ebx
jz .L005encrypt_finish
.L006encrypt_loop:
movl (%esi), %ecx
movl 4(%esi), %edx
xorl %ecx, %eax
xorl %edx, %ebx
movl %eax, 12(%esp)
movl %ebx, 16(%esp)
call des_encrypt
movl 12(%esp), %eax
movl 16(%esp), %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L006encrypt_loop
.L005encrypt_finish:
movl 56(%esp), %ebp
andl $7, %ebp
jz .L007finish
xorl %ecx, %ecx
xorl %edx, %edx
movl .L008cbc_enc_jmp_table(,%ebp,4),%ebp
jmp *%ebp
.L009ej7:
movb 6(%esi), %dh
sall $8, %edx
.L010ej6:
movb 5(%esi), %dh
.L011ej5:
movb 4(%esi), %dl
.L012ej4:
movl (%esi), %ecx
jmp .L013ejend
.L014ej3:
movb 2(%esi), %ch
sall $8, %ecx
.L015ej2:
movb 1(%esi), %ch
.L016ej1:
movb (%esi), %cl
.L013ejend:
xorl %ecx, %eax
xorl %edx, %ebx
movl %eax, 12(%esp)
movl %ebx, 16(%esp)
call des_encrypt
movl 12(%esp), %eax
movl 16(%esp), %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
jmp .L007finish
.align ALIGN
.L004decrypt:
andl $4294967288, %ebp
movl 20(%esp), %eax
movl 24(%esp), %ebx
jz .L017decrypt_finish
.L018decrypt_loop:
movl (%esi), %eax
movl 4(%esi), %ebx
movl %eax, 12(%esp)
movl %ebx, 16(%esp)
call des_encrypt
movl 12(%esp), %eax
movl 16(%esp), %ebx
movl 20(%esp), %ecx
movl 24(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
movl %ecx, (%edi)
movl %edx, 4(%edi)
movl %eax, 20(%esp)
movl %ebx, 24(%esp)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L018decrypt_loop
.L017decrypt_finish:
movl 56(%esp), %ebp
andl $7, %ebp
jz .L007finish
movl (%esi), %eax
movl 4(%esi), %ebx
movl %eax, 12(%esp)
movl %ebx, 16(%esp)
call des_encrypt
movl 12(%esp), %eax
movl 16(%esp), %ebx
movl 20(%esp), %ecx
movl 24(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
.L019dj7:
rorl $16, %edx
movb %dl, 6(%edi)
shrl $16, %edx
.L020dj6:
movb %dh, 5(%edi)
.L021dj5:
movb %dl, 4(%edi)
.L022dj4:
movl %ecx, (%edi)
jmp .L023djend
.L024dj3:
rorl $16, %ecx
movb %cl, 2(%edi)
sall $16, %ecx
.L025dj2:
movb %ch, 1(%esi)
.L026dj1:
movb %cl, (%esi)
.L023djend:
jmp .L007finish
.align ALIGN
.L007finish:
movl 64(%esp), %ecx
addl $28, %esp
movl %eax, (%ecx)
movl %ebx, 4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align ALIGN
.L008cbc_enc_jmp_table:
.long 0
.long .L016ej1
.long .L015ej2
.long .L014ej3
.long .L012ej4
.long .L011ej5
.long .L010ej6
.long .L009ej7
.align ALIGN
.L027cbc_dec_jmp_table:
.long 0
.long .L026dj1
.long .L025dj2
.long .L024dj3
.long .L022dj4
.long .L021dj5
.long .L020dj6
.long .L019dj7
.des_ncbc_encrypt_end:
SIZE(des_ncbc_encrypt,.des_ncbc_encrypt_end-des_ncbc_encrypt)
.ident "desasm.pl"
.text
.align ALIGN
.globl des_ede3_cbc_encrypt
TYPE(des_ede3_cbc_encrypt,@function)
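/* des_ede3_cbc_encrypt: triple-DES CBC.  Same driver structure as
 * des_ncbc_encrypt above, except that the three key schedules
 * (parameters 3-5) are pushed and the block-cipher calls go to
 * des_encrypt3 / des_decrypt3; the IV pointer comes from parameter 6
 * and the encrypt flag from parameter 7.
 */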
des_ede3_cbc_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp), %ebp
/* getting iv ptr from parameter 6 */
movl 44(%esp), %ebx
movl (%ebx), %esi
movl 4(%ebx), %edi
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp, %ebx
movl 36(%esp), %esi
movl 40(%esp), %edi
/* getting encrypt flag from parameter 7 */
movl 64(%esp), %ecx
/* get and push parameter 5 */
movl 56(%esp), %eax
pushl %eax
/* get and push parameter 4 */
movl 56(%esp), %eax
pushl %eax
/* get and push parameter 3 */
movl 56(%esp), %eax
pushl %eax
pushl %ebx
cmpl $0, %ecx
jz .L028decrypt
andl $4294967288, %ebp
movl 16(%esp), %eax
movl 20(%esp), %ebx
jz .L029encrypt_finish
.L030encrypt_loop:
movl (%esi), %ecx
movl 4(%esi), %edx
xorl %ecx, %eax
xorl %edx, %ebx
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
call des_encrypt3
movl 16(%esp), %eax
movl 20(%esp), %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L030encrypt_loop
.L029encrypt_finish:
movl 60(%esp), %ebp
andl $7, %ebp
jz .L031finish
xorl %ecx, %ecx
xorl %edx, %edx
movl .L032cbc_enc_jmp_table(,%ebp,4),%ebp
jmp *%ebp
.L033ej7:
movb 6(%esi), %dh
sall $8, %edx
.L034ej6:
movb 5(%esi), %dh
.L035ej5:
movb 4(%esi), %dl
.L036ej4:
movl (%esi), %ecx
jmp .L037ejend
.L038ej3:
movb 2(%esi), %ch
sall $8, %ecx
.L039ej2:
movb 1(%esi), %ch
.L040ej1:
movb (%esi), %cl
.L037ejend:
xorl %ecx, %eax
xorl %edx, %ebx
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
call des_encrypt3
movl 16(%esp), %eax
movl 20(%esp), %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
jmp .L031finish
.align ALIGN
.L028decrypt:
andl $4294967288, %ebp
movl 24(%esp), %eax
movl 28(%esp), %ebx
jz .L041decrypt_finish
.L042decrypt_loop:
movl (%esi), %eax
movl 4(%esi), %ebx
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
call des_decrypt3
movl 16(%esp), %eax
movl 20(%esp), %ebx
movl 24(%esp), %ecx
movl 28(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
movl %ecx, (%edi)
movl %edx, 4(%edi)
movl %eax, 24(%esp)
movl %ebx, 28(%esp)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L042decrypt_loop
.L041decrypt_finish:
movl 60(%esp), %ebp
andl $7, %ebp
jz .L031finish
movl (%esi), %eax
movl 4(%esi), %ebx
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
call des_decrypt3
movl 16(%esp), %eax
movl 20(%esp), %ebx
movl 24(%esp), %ecx
movl 28(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
.L043dj7:
rorl $16, %edx
movb %dl, 6(%edi)
shrl $16, %edx
.L044dj6:
movb %dh, 5(%edi)
.L045dj5:
movb %dl, 4(%edi)
.L046dj4:
movl %ecx, (%edi)
jmp .L047djend
.L048dj3:
rorl $16, %ecx
movb %cl, 2(%edi)
sall $16, %ecx
.L049dj2:
movb %ch, 1(%esi)
.L050dj1:
movb %cl, (%esi)
.L047djend:
jmp .L031finish
.align ALIGN
.L031finish:
movl 76(%esp), %ecx
addl $32, %esp
movl %eax, (%ecx)
movl %ebx, 4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align ALIGN
.L032cbc_enc_jmp_table:
.long 0
.long .L040ej1
.long .L039ej2
.long .L038ej3
.long .L036ej4
.long .L035ej5
.long .L034ej6
.long .L033ej7
.align ALIGN
.L051cbc_dec_jmp_table:
.long 0
.long .L050dj1
.long .L049dj2
.long .L048dj3
.long .L046dj4
.long .L045dj5
.long .L044dj6
.long .L043dj7
.des_ede3_cbc_encrypt_end:
SIZE(des_ede3_cbc_encrypt,.des_ede3_cbc_encrypt_end-des_ede3_cbc_encrypt)
.ident "desasm.pl"