From 3fc228fb4c1a39aceaee3d7013365042a6077bd0 Mon Sep 17 00:00:00 2001 From: cvs2svn Date: Fri, 26 Mar 1999 18:24:03 +0000 Subject: This commit was manufactured by cvs2git to create branch 'OPENBSD_2_5'. --- src/lib/libcrypto/des/asm/d-win32.asm | 3132 +++++++++++++++++++++++++++++++ src/lib/libcrypto/des/asm/dx86unix.cpp | 3202 ++++++++++++++++++++++++++++++++ src/lib/libcrypto/des/asm/y-win32.asm | 929 +++++++++ src/lib/libcrypto/des/asm/yx86unix.cpp | 976 ++++++++++ 4 files changed, 8239 insertions(+) create mode 100644 src/lib/libcrypto/des/asm/d-win32.asm create mode 100644 src/lib/libcrypto/des/asm/dx86unix.cpp create mode 100644 src/lib/libcrypto/des/asm/y-win32.asm create mode 100644 src/lib/libcrypto/des/asm/yx86unix.cpp (limited to 'src/lib/libcrypto/des/asm') diff --git a/src/lib/libcrypto/des/asm/d-win32.asm b/src/lib/libcrypto/des/asm/d-win32.asm new file mode 100644 index 0000000000..9e3dc9cd87 --- /dev/null +++ b/src/lib/libcrypto/des/asm/d-win32.asm @@ -0,0 +1,3132 @@ + ; Don't even think of reading this code + ; It was automatically generated by des-586.pl + ; Which is a perl program used to generate the x86 assember for + ; any of elf, a.out, BSDI,Win32, or Solaris + ; eric + ; + TITLE des-586.asm + .386 +.model FLAT +_TEXT SEGMENT +PUBLIC _des_encrypt +EXTRN _des_SPtrans:DWORD +_des_encrypt PROC NEAR + push esi + push edi + ; + ; Load the 2 words + mov esi, DWORD PTR 12[esp] + xor ecx, ecx + push ebx + push ebp + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 28[esp] + mov edi, DWORD PTR 4[esi] + ; + ; IP + rol eax, 4 + mov esi, eax + xor eax, edi + and eax, 0f0f0f0f0h + xor esi, eax + xor edi, eax + ; + rol edi, 20 + mov eax, edi + xor edi, esi + and edi, 0fff0000fh + xor eax, edi + xor esi, edi + ; + rol eax, 14 + mov edi, eax + xor eax, esi + and eax, 033333333h + xor edi, eax + xor esi, eax + ; + rol esi, 22 + mov eax, esi + xor esi, edi + and esi, 003fc03fch + xor eax, esi + xor edi, esi + ; + rol eax, 9 + mov esi, eax + xor eax, edi + and eax, 0aaaaaaaah + xor esi, eax + xor edi, eax + ; + rol edi, 1 + mov ebp, DWORD PTR 24[esp] + cmp ebx, 0 + je $L000start_decrypt + ; + ; Round 0 + mov eax, DWORD PTR [ebp] + xor ebx, ebx + mov edx, DWORD PTR 4[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 1 + mov eax, DWORD PTR 8[ebp] + xor ebx, ebx + mov edx, DWORD PTR 12[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR 
_des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 2 + mov eax, DWORD PTR 16[ebp] + xor ebx, ebx + mov edx, DWORD PTR 20[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 3 + mov eax, DWORD PTR 24[ebp] + xor ebx, ebx + mov edx, DWORD PTR 28[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 4 + mov eax, DWORD PTR 32[ebp] + xor ebx, ebx + mov edx, DWORD PTR 36[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 5 + mov eax, DWORD PTR 40[ebp] + xor ebx, ebx + mov edx, DWORD PTR 44[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 6 + mov eax, DWORD PTR 48[ebp] + xor ebx, ebx + mov 
edx, DWORD PTR 52[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 7 + mov eax, DWORD PTR 56[ebp] + xor ebx, ebx + mov edx, DWORD PTR 60[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 8 + mov eax, DWORD PTR 64[ebp] + xor ebx, ebx + mov edx, DWORD PTR 68[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 9 + mov eax, DWORD PTR 72[ebp] + xor ebx, ebx + mov edx, DWORD PTR 76[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 10 + mov eax, DWORD PTR 80[ebp] + xor ebx, ebx + mov edx, DWORD PTR 84[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr 
eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 11 + mov eax, DWORD PTR 88[ebp] + xor ebx, ebx + mov edx, DWORD PTR 92[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 12 + mov eax, DWORD PTR 96[ebp] + xor ebx, ebx + mov edx, DWORD PTR 100[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 13 + mov eax, DWORD PTR 104[ebp] + xor ebx, ebx + mov edx, DWORD PTR 108[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 14 + mov eax, DWORD PTR 112[ebp] + xor ebx, ebx + mov edx, DWORD PTR 116[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + 
xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 15 + mov eax, DWORD PTR 120[ebp] + xor ebx, ebx + mov edx, DWORD PTR 124[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + jmp $L001end +$L000start_decrypt: + ; + ; Round 15 + mov eax, DWORD PTR 120[ebp] + xor ebx, ebx + mov edx, DWORD PTR 124[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 14 + mov eax, DWORD PTR 112[ebp] + xor ebx, ebx + mov edx, DWORD PTR 116[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 13 + mov eax, DWORD PTR 104[ebp] + xor ebx, ebx + mov edx, DWORD PTR 108[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 12 + mov eax, DWORD PTR 96[ebp] 
+ xor ebx, ebx + mov edx, DWORD PTR 100[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 11 + mov eax, DWORD PTR 88[ebp] + xor ebx, ebx + mov edx, DWORD PTR 92[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 10 + mov eax, DWORD PTR 80[ebp] + xor ebx, ebx + mov edx, DWORD PTR 84[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 9 + mov eax, DWORD PTR 72[ebp] + xor ebx, ebx + mov edx, DWORD PTR 76[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 8 + mov eax, DWORD PTR 64[ebp] + xor ebx, ebx + mov edx, DWORD PTR 68[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp 
+ mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 7 + mov eax, DWORD PTR 56[ebp] + xor ebx, ebx + mov edx, DWORD PTR 60[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 6 + mov eax, DWORD PTR 48[ebp] + xor ebx, ebx + mov edx, DWORD PTR 52[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 5 + mov eax, DWORD PTR 40[ebp] + xor ebx, ebx + mov edx, DWORD PTR 44[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 4 + mov eax, DWORD PTR 32[ebp] + xor ebx, ebx + mov edx, DWORD PTR 36[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR 
_des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 3 + mov eax, DWORD PTR 24[ebp] + xor ebx, ebx + mov edx, DWORD PTR 28[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 2 + mov eax, DWORD PTR 16[ebp] + xor ebx, ebx + mov edx, DWORD PTR 20[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 1 + mov eax, DWORD PTR 8[ebp] + xor ebx, ebx + mov edx, DWORD PTR 12[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 0 + mov eax, DWORD PTR [ebp] + xor ebx, ebx + mov edx, DWORD PTR 4[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx +$L001end: + ; + ; FP + mov edx, DWORD PTR 20[esp] + ror esi, 1 + mov 
eax, edi + xor edi, esi + and edi, 0aaaaaaaah + xor eax, edi + xor esi, edi + ; + rol eax, 23 + mov edi, eax + xor eax, esi + and eax, 003fc03fch + xor edi, eax + xor esi, eax + ; + rol edi, 10 + mov eax, edi + xor edi, esi + and edi, 033333333h + xor eax, edi + xor esi, edi + ; + rol esi, 18 + mov edi, esi + xor esi, eax + and esi, 0fff0000fh + xor edi, esi + xor eax, esi + ; + rol edi, 12 + mov esi, edi + xor edi, eax + and edi, 0f0f0f0f0h + xor esi, edi + xor eax, edi + ; + ror eax, 4 + mov DWORD PTR [edx],eax + mov DWORD PTR 4[edx],esi + pop ebp + pop ebx + pop edi + pop esi + ret +_des_encrypt ENDP +_TEXT ENDS +_TEXT SEGMENT +PUBLIC _des_encrypt2 +EXTRN _des_SPtrans:DWORD +_des_encrypt2 PROC NEAR + push esi + push edi + ; + ; Load the 2 words + mov eax, DWORD PTR 12[esp] + xor ecx, ecx + push ebx + push ebp + mov esi, DWORD PTR [eax] + mov ebx, DWORD PTR 28[esp] + rol esi, 3 + mov edi, DWORD PTR 4[eax] + rol edi, 3 + mov ebp, DWORD PTR 24[esp] + cmp ebx, 0 + je $L002start_decrypt + ; + ; Round 0 + mov eax, DWORD PTR [ebp] + xor ebx, ebx + mov edx, DWORD PTR 4[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 1 + mov eax, DWORD PTR 8[ebp] + xor ebx, ebx + mov edx, DWORD PTR 12[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 2 + mov eax, DWORD PTR 16[ebp] + xor ebx, ebx + mov edx, DWORD PTR 20[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 3 + mov eax, DWORD PTR 24[ebp] + xor ebx, ebx + mov edx, DWORD PTR 28[ebp] + xor eax, edi + 
xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 4 + mov eax, DWORD PTR 32[ebp] + xor ebx, ebx + mov edx, DWORD PTR 36[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 5 + mov eax, DWORD PTR 40[ebp] + xor ebx, ebx + mov edx, DWORD PTR 44[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 6 + mov eax, DWORD PTR 48[ebp] + xor ebx, ebx + mov edx, DWORD PTR 52[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 7 + mov eax, DWORD PTR 56[ebp] + xor ebx, ebx + mov edx, DWORD PTR 60[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR 
_des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 8 + mov eax, DWORD PTR 64[ebp] + xor ebx, ebx + mov edx, DWORD PTR 68[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 9 + mov eax, DWORD PTR 72[ebp] + xor ebx, ebx + mov edx, DWORD PTR 76[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 10 + mov eax, DWORD PTR 80[ebp] + xor ebx, ebx + mov edx, DWORD PTR 84[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 11 + mov eax, DWORD PTR 88[ebp] + xor ebx, ebx + mov edx, DWORD PTR 92[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR 
_des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 12 + mov eax, DWORD PTR 96[ebp] + xor ebx, ebx + mov edx, DWORD PTR 100[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 13 + mov eax, DWORD PTR 104[ebp] + xor ebx, ebx + mov edx, DWORD PTR 108[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 14 + mov eax, DWORD PTR 112[ebp] + xor ebx, ebx + mov edx, DWORD PTR 116[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 15 + mov eax, DWORD PTR 120[ebp] + xor ebx, ebx + mov edx, DWORD PTR 124[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + jmp $L003end +$L002start_decrypt: + ; + ; Round 15 + mov eax, DWORD PTR 120[ebp] + xor ebx, ebx + mov edx, DWORD 
PTR 124[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 14 + mov eax, DWORD PTR 112[ebp] + xor ebx, ebx + mov edx, DWORD PTR 116[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 13 + mov eax, DWORD PTR 104[ebp] + xor ebx, ebx + mov edx, DWORD PTR 108[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 12 + mov eax, DWORD PTR 96[ebp] + xor ebx, ebx + mov edx, DWORD PTR 100[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 11 + mov eax, DWORD PTR 88[ebp] + xor ebx, ebx + mov edx, DWORD PTR 92[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 
16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 10 + mov eax, DWORD PTR 80[ebp] + xor ebx, ebx + mov edx, DWORD PTR 84[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 9 + mov eax, DWORD PTR 72[ebp] + xor ebx, ebx + mov edx, DWORD PTR 76[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 8 + mov eax, DWORD PTR 64[ebp] + xor ebx, ebx + mov edx, DWORD PTR 68[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 7 + mov eax, DWORD PTR 56[ebp] + xor ebx, ebx + mov edx, DWORD PTR 60[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + 
mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 6 + mov eax, DWORD PTR 48[ebp] + xor ebx, ebx + mov edx, DWORD PTR 52[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 5 + mov eax, DWORD PTR 40[ebp] + xor ebx, ebx + mov edx, DWORD PTR 44[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 4 + mov eax, DWORD PTR 32[ebp] + xor ebx, ebx + mov edx, DWORD PTR 36[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 3 + mov eax, DWORD PTR 24[ebp] + xor ebx, ebx + mov edx, DWORD PTR 28[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 2 + mov eax, DWORD PTR 16[ebp] + xor ebx, ebx + mov edx, DWORD PTR 20[ebp] + xor eax, edi + 
xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 1 + mov eax, DWORD PTR 8[ebp] + xor ebx, ebx + mov edx, DWORD PTR 12[ebp] + xor eax, esi + xor edx, esi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 0 + mov eax, DWORD PTR [ebp] + xor ebx, ebx + mov edx, DWORD PTR 4[ebp] + xor eax, edi + xor edx, edi + and eax, 0fcfcfcfch + and edx, 0cfcfcfcfh + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 24[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx +$L003end: + ; + ; Fixup + ror edi, 3 + mov eax, DWORD PTR 20[esp] + ror esi, 3 + mov DWORD PTR [eax],edi + mov DWORD PTR 4[eax],esi + pop ebp + pop ebx + pop edi + pop esi + ret +_des_encrypt2 ENDP +_TEXT ENDS +_TEXT SEGMENT +PUBLIC _des_encrypt3 + +_des_encrypt3 PROC NEAR + push ebx + mov ebx, DWORD PTR 8[esp] + push ebp + push esi + push edi + ; + ; Load the data words + mov edi, DWORD PTR [ebx] + mov esi, DWORD PTR 4[ebx] + sub esp, 12 + ; + ; IP + rol edi, 4 + mov edx, edi + xor edi, esi + and edi, 0f0f0f0f0h + xor edx, edi + xor esi, edi + ; + rol esi, 20 + mov edi, esi + xor esi, edx + and esi, 0fff0000fh + xor edi, esi + xor edx, esi + ; + rol edi, 14 + mov esi, edi + xor edi, edx + and edi, 033333333h + xor esi, edi + xor edx, edi + ; + rol edx, 22 + mov edi, edx + xor edx, esi + and edx, 003fc03fch + xor edi, edx + xor esi, edx + ; + rol edi, 9 + mov edx, edi + xor edi, esi + and edi, 0aaaaaaaah + xor edx, edi + xor esi, edi + ; + ror edx, 3 + ror esi, 2 + mov DWORD PTR 4[ebx],esi + mov eax, DWORD PTR 36[esp] + mov DWORD PTR [ebx],edx + mov edi, DWORD PTR 40[esp] + mov esi, DWORD PTR 44[esp] + mov DWORD PTR 8[esp],1 + mov DWORD PTR 4[esp],eax + mov DWORD PTR [esp],ebx + call 
_des_encrypt2 + mov DWORD PTR 8[esp],0 + mov DWORD PTR 4[esp],edi + mov DWORD PTR [esp],ebx + call _des_encrypt2 + mov DWORD PTR 8[esp],1 + mov DWORD PTR 4[esp],esi + mov DWORD PTR [esp],ebx + call _des_encrypt2 + add esp, 12 + mov edi, DWORD PTR [ebx] + mov esi, DWORD PTR 4[ebx] + ; + ; FP + rol esi, 2 + rol edi, 3 + mov eax, edi + xor edi, esi + and edi, 0aaaaaaaah + xor eax, edi + xor esi, edi + ; + rol eax, 23 + mov edi, eax + xor eax, esi + and eax, 003fc03fch + xor edi, eax + xor esi, eax + ; + rol edi, 10 + mov eax, edi + xor edi, esi + and edi, 033333333h + xor eax, edi + xor esi, edi + ; + rol esi, 18 + mov edi, esi + xor esi, eax + and esi, 0fff0000fh + xor edi, esi + xor eax, esi + ; + rol edi, 12 + mov esi, edi + xor edi, eax + and edi, 0f0f0f0f0h + xor esi, edi + xor eax, edi + ; + ror eax, 4 + mov DWORD PTR [ebx],eax + mov DWORD PTR 4[ebx],esi + pop edi + pop esi + pop ebp + pop ebx + ret +_des_encrypt3 ENDP +_TEXT ENDS +_TEXT SEGMENT +PUBLIC _des_decrypt3 + +_des_decrypt3 PROC NEAR + push ebx + mov ebx, DWORD PTR 8[esp] + push ebp + push esi + push edi + ; + ; Load the data words + mov edi, DWORD PTR [ebx] + mov esi, DWORD PTR 4[ebx] + sub esp, 12 + ; + ; IP + rol edi, 4 + mov edx, edi + xor edi, esi + and edi, 0f0f0f0f0h + xor edx, edi + xor esi, edi + ; + rol esi, 20 + mov edi, esi + xor esi, edx + and esi, 0fff0000fh + xor edi, esi + xor edx, esi + ; + rol edi, 14 + mov esi, edi + xor edi, edx + and edi, 033333333h + xor esi, edi + xor edx, edi + ; + rol edx, 22 + mov edi, edx + xor edx, esi + and edx, 003fc03fch + xor edi, edx + xor esi, edx + ; + rol edi, 9 + mov edx, edi + xor edi, esi + and edi, 0aaaaaaaah + xor edx, edi + xor esi, edi + ; + ror edx, 3 + ror esi, 2 + mov DWORD PTR 4[ebx],esi + mov esi, DWORD PTR 36[esp] + mov DWORD PTR [ebx],edx + mov edi, DWORD PTR 40[esp] + mov eax, DWORD PTR 44[esp] + mov DWORD PTR 8[esp],0 + mov DWORD PTR 4[esp],eax + mov DWORD PTR [esp],ebx + call _des_encrypt2 + mov DWORD PTR 8[esp],1 + mov DWORD PTR 4[esp],edi + mov DWORD PTR [esp],ebx + call _des_encrypt2 + mov DWORD PTR 8[esp],0 + mov DWORD PTR 4[esp],esi + mov DWORD PTR [esp],ebx + call _des_encrypt2 + add esp, 12 + mov edi, DWORD PTR [ebx] + mov esi, DWORD PTR 4[ebx] + ; + ; FP + rol esi, 2 + rol edi, 3 + mov eax, edi + xor edi, esi + and edi, 0aaaaaaaah + xor eax, edi + xor esi, edi + ; + rol eax, 23 + mov edi, eax + xor eax, esi + and eax, 003fc03fch + xor edi, eax + xor esi, eax + ; + rol edi, 10 + mov eax, edi + xor edi, esi + and edi, 033333333h + xor eax, edi + xor esi, edi + ; + rol esi, 18 + mov edi, esi + xor esi, eax + and esi, 0fff0000fh + xor edi, esi + xor eax, esi + ; + rol edi, 12 + mov esi, edi + xor edi, eax + and edi, 0f0f0f0f0h + xor esi, edi + xor eax, edi + ; + ror eax, 4 + mov DWORD PTR [ebx],eax + mov DWORD PTR 4[ebx],esi + pop edi + pop esi + pop ebp + pop ebx + ret +_des_decrypt3 ENDP +_TEXT ENDS +_TEXT SEGMENT +PUBLIC _des_ncbc_encrypt + +_des_ncbc_encrypt PROC NEAR + ; + push ebp + push ebx + push esi + push edi + mov ebp, DWORD PTR 28[esp] + ; getting iv ptr from parameter 4 + mov ebx, DWORD PTR 36[esp] + mov esi, DWORD PTR [ebx] + mov edi, DWORD PTR 4[ebx] + push edi + push esi + push edi + push esi + mov ebx, esp + mov esi, DWORD PTR 36[esp] + mov edi, DWORD PTR 40[esp] + ; getting encrypt flag from parameter 5 + mov ecx, DWORD PTR 56[esp] + ; get and push parameter 5 + push ecx + ; get and push parameter 3 + mov eax, DWORD PTR 52[esp] + push eax + push ebx + cmp ecx, 0 + jz $L004decrypt + and ebp, 4294967288 + mov eax, DWORD PTR 12[esp] + mov 
ebx, DWORD PTR 16[esp] + jz $L005encrypt_finish +L006encrypt_loop: + mov ecx, DWORD PTR [esi] + mov edx, DWORD PTR 4[esi] + xor eax, ecx + xor ebx, edx + mov DWORD PTR 12[esp],eax + mov DWORD PTR 16[esp],ebx + call _des_encrypt + mov eax, DWORD PTR 12[esp] + mov ebx, DWORD PTR 16[esp] + mov DWORD PTR [edi],eax + mov DWORD PTR 4[edi],ebx + add esi, 8 + add edi, 8 + sub ebp, 8 + jnz L006encrypt_loop +$L005encrypt_finish: + mov ebp, DWORD PTR 56[esp] + and ebp, 7 + jz $L007finish + xor ecx, ecx + xor edx, edx + mov ebp, DWORD PTR $L008cbc_enc_jmp_table[ebp*4] + jmp ebp +L009ej7: + mov dh, BYTE PTR 6[esi] + shl edx, 8 +L010ej6: + mov dh, BYTE PTR 5[esi] +L011ej5: + mov dl, BYTE PTR 4[esi] +L012ej4: + mov ecx, DWORD PTR [esi] + jmp $L013ejend +L014ej3: + mov ch, BYTE PTR 2[esi] + shl ecx, 8 +L015ej2: + mov ch, BYTE PTR 1[esi] +L016ej1: + mov cl, BYTE PTR [esi] +$L013ejend: + xor eax, ecx + xor ebx, edx + mov DWORD PTR 12[esp],eax + mov DWORD PTR 16[esp],ebx + call _des_encrypt + mov eax, DWORD PTR 12[esp] + mov ebx, DWORD PTR 16[esp] + mov DWORD PTR [edi],eax + mov DWORD PTR 4[edi],ebx + jmp $L007finish +$L004decrypt: + and ebp, 4294967288 + mov eax, DWORD PTR 20[esp] + mov ebx, DWORD PTR 24[esp] + jz $L017decrypt_finish +L018decrypt_loop: + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] + mov DWORD PTR 12[esp],eax + mov DWORD PTR 16[esp],ebx + call _des_encrypt + mov eax, DWORD PTR 12[esp] + mov ebx, DWORD PTR 16[esp] + mov ecx, DWORD PTR 20[esp] + mov edx, DWORD PTR 24[esp] + xor ecx, eax + xor edx, ebx + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] + mov DWORD PTR [edi],ecx + mov DWORD PTR 4[edi],edx + mov DWORD PTR 20[esp],eax + mov DWORD PTR 24[esp],ebx + add esi, 8 + add edi, 8 + sub ebp, 8 + jnz L018decrypt_loop +$L017decrypt_finish: + mov ebp, DWORD PTR 56[esp] + and ebp, 7 + jz $L007finish + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] + mov DWORD PTR 12[esp],eax + mov DWORD PTR 16[esp],ebx + call _des_encrypt + mov eax, DWORD PTR 12[esp] + mov ebx, DWORD PTR 16[esp] + mov ecx, DWORD PTR 20[esp] + mov edx, DWORD PTR 24[esp] + xor ecx, eax + xor edx, ebx + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] +L019dj7: + ror edx, 16 + mov BYTE PTR 6[edi],dl + shr edx, 16 +L020dj6: + mov BYTE PTR 5[edi],dh +L021dj5: + mov BYTE PTR 4[edi],dl +L022dj4: + mov DWORD PTR [edi],ecx + jmp $L023djend +L024dj3: + ror ecx, 16 + mov BYTE PTR 2[edi],cl + shl ecx, 16 +L025dj2: + mov BYTE PTR 1[esi],ch +L026dj1: + mov BYTE PTR [esi], cl +$L023djend: + jmp $L007finish +$L007finish: + mov ecx, DWORD PTR 64[esp] + add esp, 28 + mov DWORD PTR [ecx],eax + mov DWORD PTR 4[ecx],ebx + pop edi + pop esi + pop ebx + pop ebp + ret +$L008cbc_enc_jmp_table: + DD 0 + DD L016ej1 + DD L015ej2 + DD L014ej3 + DD L012ej4 + DD L011ej5 + DD L010ej6 + DD L009ej7 +L027cbc_dec_jmp_table: + DD 0 + DD L026dj1 + DD L025dj2 + DD L024dj3 + DD L022dj4 + DD L021dj5 + DD L020dj6 + DD L019dj7 +_des_ncbc_encrypt ENDP +_TEXT ENDS +_TEXT SEGMENT +PUBLIC _des_ede3_cbc_encrypt + +_des_ede3_cbc_encrypt PROC NEAR + ; + push ebp + push ebx + push esi + push edi + mov ebp, DWORD PTR 28[esp] + ; getting iv ptr from parameter 6 + mov ebx, DWORD PTR 44[esp] + mov esi, DWORD PTR [ebx] + mov edi, DWORD PTR 4[ebx] + push edi + push esi + push edi + push esi + mov ebx, esp + mov esi, DWORD PTR 36[esp] + mov edi, DWORD PTR 40[esp] + ; getting encrypt flag from parameter 7 + mov ecx, DWORD PTR 64[esp] + ; get and push parameter 5 + mov eax, DWORD PTR 56[esp] + push eax + ; get and push parameter 4 + mov eax, DWORD PTR 56[esp] + 
push eax + ; get and push parameter 3 + mov eax, DWORD PTR 56[esp] + push eax + push ebx + cmp ecx, 0 + jz $L028decrypt + and ebp, 4294967288 + mov eax, DWORD PTR 16[esp] + mov ebx, DWORD PTR 20[esp] + jz $L029encrypt_finish +L030encrypt_loop: + mov ecx, DWORD PTR [esi] + mov edx, DWORD PTR 4[esi] + xor eax, ecx + xor ebx, edx + mov DWORD PTR 16[esp],eax + mov DWORD PTR 20[esp],ebx + call _des_encrypt3 + mov eax, DWORD PTR 16[esp] + mov ebx, DWORD PTR 20[esp] + mov DWORD PTR [edi],eax + mov DWORD PTR 4[edi],ebx + add esi, 8 + add edi, 8 + sub ebp, 8 + jnz L030encrypt_loop +$L029encrypt_finish: + mov ebp, DWORD PTR 60[esp] + and ebp, 7 + jz $L031finish + xor ecx, ecx + xor edx, edx + mov ebp, DWORD PTR $L032cbc_enc_jmp_table[ebp*4] + jmp ebp +L033ej7: + mov dh, BYTE PTR 6[esi] + shl edx, 8 +L034ej6: + mov dh, BYTE PTR 5[esi] +L035ej5: + mov dl, BYTE PTR 4[esi] +L036ej4: + mov ecx, DWORD PTR [esi] + jmp $L037ejend +L038ej3: + mov ch, BYTE PTR 2[esi] + shl ecx, 8 +L039ej2: + mov ch, BYTE PTR 1[esi] +L040ej1: + mov cl, BYTE PTR [esi] +$L037ejend: + xor eax, ecx + xor ebx, edx + mov DWORD PTR 16[esp],eax + mov DWORD PTR 20[esp],ebx + call _des_encrypt3 + mov eax, DWORD PTR 16[esp] + mov ebx, DWORD PTR 20[esp] + mov DWORD PTR [edi],eax + mov DWORD PTR 4[edi],ebx + jmp $L031finish +$L028decrypt: + and ebp, 4294967288 + mov eax, DWORD PTR 24[esp] + mov ebx, DWORD PTR 28[esp] + jz $L041decrypt_finish +L042decrypt_loop: + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] + mov DWORD PTR 16[esp],eax + mov DWORD PTR 20[esp],ebx + call _des_decrypt3 + mov eax, DWORD PTR 16[esp] + mov ebx, DWORD PTR 20[esp] + mov ecx, DWORD PTR 24[esp] + mov edx, DWORD PTR 28[esp] + xor ecx, eax + xor edx, ebx + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] + mov DWORD PTR [edi],ecx + mov DWORD PTR 4[edi],edx + mov DWORD PTR 24[esp],eax + mov DWORD PTR 28[esp],ebx + add esi, 8 + add edi, 8 + sub ebp, 8 + jnz L042decrypt_loop +$L041decrypt_finish: + mov ebp, DWORD PTR 60[esp] + and ebp, 7 + jz $L031finish + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] + mov DWORD PTR 16[esp],eax + mov DWORD PTR 20[esp],ebx + call _des_decrypt3 + mov eax, DWORD PTR 16[esp] + mov ebx, DWORD PTR 20[esp] + mov ecx, DWORD PTR 24[esp] + mov edx, DWORD PTR 28[esp] + xor ecx, eax + xor edx, ebx + mov eax, DWORD PTR [esi] + mov ebx, DWORD PTR 4[esi] +L043dj7: + ror edx, 16 + mov BYTE PTR 6[edi],dl + shr edx, 16 +L044dj6: + mov BYTE PTR 5[edi],dh +L045dj5: + mov BYTE PTR 4[edi],dl +L046dj4: + mov DWORD PTR [edi],ecx + jmp $L047djend +L048dj3: + ror ecx, 16 + mov BYTE PTR 2[edi],cl + shl ecx, 16 +L049dj2: + mov BYTE PTR 1[esi],ch +L050dj1: + mov BYTE PTR [esi], cl +$L047djend: + jmp $L031finish +$L031finish: + mov ecx, DWORD PTR 76[esp] + add esp, 32 + mov DWORD PTR [ecx],eax + mov DWORD PTR 4[ecx],ebx + pop edi + pop esi + pop ebx + pop ebp + ret +$L032cbc_enc_jmp_table: + DD 0 + DD L040ej1 + DD L039ej2 + DD L038ej3 + DD L036ej4 + DD L035ej5 + DD L034ej6 + DD L033ej7 +L051cbc_dec_jmp_table: + DD 0 + DD L050dj1 + DD L049dj2 + DD L048dj3 + DD L046dj4 + DD L045dj5 + DD L044dj6 + DD L043dj7 +_des_ede3_cbc_encrypt ENDP +_TEXT ENDS +END diff --git a/src/lib/libcrypto/des/asm/dx86unix.cpp b/src/lib/libcrypto/des/asm/dx86unix.cpp new file mode 100644 index 0000000000..6fca9afa16 --- /dev/null +++ b/src/lib/libcrypto/des/asm/dx86unix.cpp @@ -0,0 +1,3202 @@ +/* Run the C pre-processor over this file with one of the following defined + * ELF - elf object files, + * OUT - a.out object files, + * BSDI - BSDI style a.out object files + * SOL - 
Solaris style elf + */ + +#define TYPE(a,b) .type a,b +#define SIZE(a,b) .size a,b + +#if defined(OUT) || defined(BSDI) +#define des_SPtrans _des_SPtrans +#define des_encrypt _des_encrypt +#define des_encrypt2 _des_encrypt2 +#define des_encrypt3 _des_encrypt3 +#define des_decrypt3 _des_decrypt3 +#define des_ncbc_encrypt _des_ncbc_encrypt +#define des_ede3_cbc_encrypt _des_ede3_cbc_encrypt + +#endif + +#ifdef OUT +#define OK 1 +#define ALIGN 4 +#endif + +#ifdef BSDI +#define OK 1 +#define ALIGN 4 +#undef SIZE +#undef TYPE +#define SIZE(a,b) +#define TYPE(a,b) +#endif + +#if defined(ELF) || defined(SOL) +#define OK 1 +#define ALIGN 16 +#endif + +#ifndef OK +You need to define one of +ELF - elf systems - linux-elf, NetBSD and DG-UX +OUT - a.out systems - linux-a.out and FreeBSD +SOL - solaris systems, which are elf with strange comment lines +BSDI - a.out with a very primative version of as. +#endif + +/* Let the Assembler begin :-) */ + /* Don't even think of reading this code */ + /* It was automatically generated by des-586.pl */ + /* Which is a perl program used to generate the x86 assember for */ + /* any of elf, a.out, BSDI,Win32, or Solaris */ + /* eric */ + + .file "des-586.s" + .version "01.01" +gcc2_compiled.: +.text + .align ALIGN +.globl des_encrypt + TYPE(des_encrypt,@function) +des_encrypt: + pushl %esi + pushl %edi + + /* Load the 2 words */ + movl 12(%esp), %esi + xorl %ecx, %ecx + pushl %ebx + pushl %ebp + movl (%esi), %eax + movl 28(%esp), %ebx + movl 4(%esi), %edi + + /* IP */ + roll $4, %eax + movl %eax, %esi + xorl %edi, %eax + andl $0xf0f0f0f0, %eax + xorl %eax, %esi + xorl %eax, %edi + + roll $20, %edi + movl %edi, %eax + xorl %esi, %edi + andl $0xfff0000f, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $14, %eax + movl %eax, %edi + xorl %esi, %eax + andl $0x33333333, %eax + xorl %eax, %edi + xorl %eax, %esi + + roll $22, %esi + movl %esi, %eax + xorl %edi, %esi + andl $0x03fc03fc, %esi + xorl %esi, %eax + xorl %esi, %edi + + roll $9, %eax + movl %eax, %esi + xorl %edi, %eax + andl $0xaaaaaaaa, %eax + xorl %eax, %esi + xorl %eax, %edi + +.byte 209 +.byte 199 /* roll $1 %edi */ + movl 24(%esp), %ebp + cmpl $0, %ebx + je .L000start_decrypt + + /* Round 0 */ + movl (%ebp), %eax + xorl %ebx, %ebx + movl 4(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 1 */ + movl 8(%ebp), %eax + xorl %ebx, %ebx + movl 12(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, 
%eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 2 */ + movl 16(%ebp), %eax + xorl %ebx, %ebx + movl 20(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 3 */ + movl 24(%ebp), %eax + xorl %ebx, %ebx + movl 28(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 4 */ + movl 32(%ebp), %eax + xorl %ebx, %ebx + movl 36(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 5 */ + movl 40(%ebp), %eax + xorl %ebx, %ebx + movl 44(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 6 */ + movl 48(%ebp), %eax + xorl %ebx, %ebx + movl 52(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl 
$0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 7 */ + movl 56(%ebp), %eax + xorl %ebx, %ebx + movl 60(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 8 */ + movl 64(%ebp), %eax + xorl %ebx, %ebx + movl 68(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 9 */ + movl 72(%ebp), %eax + xorl %ebx, %ebx + movl 76(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 10 */ + movl 80(%ebp), %eax + xorl %ebx, %ebx + movl 84(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 
0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 11 */ + movl 88(%ebp), %eax + xorl %ebx, %ebx + movl 92(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 12 */ + movl 96(%ebp), %eax + xorl %ebx, %ebx + movl 100(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 13 */ + movl 104(%ebp), %eax + xorl %ebx, %ebx + movl 108(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 14 */ + movl 112(%ebp), %eax + xorl %ebx, %ebx + movl 116(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 
15 */ + movl 120(%ebp), %eax + xorl %ebx, %ebx + movl 124(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + jmp .L001end +.L000start_decrypt: + + /* Round 15 */ + movl 120(%ebp), %eax + xorl %ebx, %ebx + movl 124(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 14 */ + movl 112(%ebp), %eax + xorl %ebx, %ebx + movl 116(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 13 */ + movl 104(%ebp), %eax + xorl %ebx, %ebx + movl 108(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 12 */ + movl 96(%ebp), %eax + xorl %ebx, %ebx + movl 100(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl 
%ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 11 */ + movl 88(%ebp), %eax + xorl %ebx, %ebx + movl 92(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 10 */ + movl 80(%ebp), %eax + xorl %ebx, %ebx + movl 84(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 9 */ + movl 72(%ebp), %eax + xorl %ebx, %ebx + movl 76(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 8 */ + movl 64(%ebp), %eax + xorl %ebx, %ebx + movl 68(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl 
%ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 7 */ + movl 56(%ebp), %eax + xorl %ebx, %ebx + movl 60(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 6 */ + movl 48(%ebp), %eax + xorl %ebx, %ebx + movl 52(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 5 */ + movl 40(%ebp), %eax + xorl %ebx, %ebx + movl 44(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 4 */ + movl 32(%ebp), %eax + xorl %ebx, %ebx + movl 36(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 3 */ + movl 24(%ebp), %eax + xorl %ebx, %ebx + movl 28(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp 
+ movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 2 */ + movl 16(%ebp), %eax + xorl %ebx, %ebx + movl 20(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 1 */ + movl 8(%ebp), %eax + xorl %ebx, %ebx + movl 12(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 0 */ + movl (%ebp), %eax + xorl %ebx, %ebx + movl 4(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi +.L001end: + + /* FP */ + movl 20(%esp), %edx +.byte 209 +.byte 206 /* rorl $1 %esi */ + movl %edi, %eax + xorl %esi, %edi + andl $0xaaaaaaaa, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $23, %eax + movl %eax, %edi + xorl %esi, %eax + andl $0x03fc03fc, %eax + xorl %eax, %edi + xorl %eax, %esi + + roll $10, %edi + movl %edi, %eax + xorl %esi, %edi + andl $0x33333333, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $18, %esi + movl %esi, %edi + xorl %eax, %esi + andl $0xfff0000f, %esi + xorl %esi, %edi + xorl %esi, %eax + + roll $12, %edi + movl %edi, %esi + xorl %eax, %edi + andl $0xf0f0f0f0, 
%edi + xorl %edi, %esi + xorl %edi, %eax + + rorl $4, %eax + movl %eax, (%edx) + movl %esi, 4(%edx) + popl %ebp + popl %ebx + popl %edi + popl %esi + ret +.des_encrypt_end: + SIZE(des_encrypt,.des_encrypt_end-des_encrypt) +.ident "desasm.pl" +.text + .align ALIGN +.globl des_encrypt2 + TYPE(des_encrypt2,@function) +des_encrypt2: + pushl %esi + pushl %edi + + /* Load the 2 words */ + movl 12(%esp), %eax + xorl %ecx, %ecx + pushl %ebx + pushl %ebp + movl (%eax), %esi + movl 28(%esp), %ebx + roll $3, %esi + movl 4(%eax), %edi + roll $3, %edi + movl 24(%esp), %ebp + cmpl $0, %ebx + je .L002start_decrypt + + /* Round 0 */ + movl (%ebp), %eax + xorl %ebx, %ebx + movl 4(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 1 */ + movl 8(%ebp), %eax + xorl %ebx, %ebx + movl 12(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 2 */ + movl 16(%ebp), %eax + xorl %ebx, %ebx + movl 20(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 3 */ + movl 24(%ebp), %eax + xorl %ebx, %ebx + movl 28(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, 
%eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 4 */ + movl 32(%ebp), %eax + xorl %ebx, %ebx + movl 36(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 5 */ + movl 40(%ebp), %eax + xorl %ebx, %ebx + movl 44(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 6 */ + movl 48(%ebp), %eax + xorl %ebx, %ebx + movl 52(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 7 */ + movl 56(%ebp), %eax + xorl %ebx, %ebx + movl 60(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 8 */ + movl 64(%ebp), %eax + xorl %ebx, %ebx + movl 68(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl 
$0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 9 */ + movl 72(%ebp), %eax + xorl %ebx, %ebx + movl 76(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 10 */ + movl 80(%ebp), %eax + xorl %ebx, %ebx + movl 84(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 11 */ + movl 88(%ebp), %eax + xorl %ebx, %ebx + movl 92(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 12 */ + movl 96(%ebp), %eax + xorl %ebx, %ebx + movl 100(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 
0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 13 */ + movl 104(%ebp), %eax + xorl %ebx, %ebx + movl 108(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 14 */ + movl 112(%ebp), %eax + xorl %ebx, %ebx + movl 116(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 15 */ + movl 120(%ebp), %eax + xorl %ebx, %ebx + movl 124(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + jmp .L003end +.L002start_decrypt: + + /* Round 15 */ + movl 120(%ebp), %eax + xorl %ebx, %ebx + movl 124(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 
0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 14 */ + movl 112(%ebp), %eax + xorl %ebx, %ebx + movl 116(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 13 */ + movl 104(%ebp), %eax + xorl %ebx, %ebx + movl 108(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 12 */ + movl 96(%ebp), %eax + xorl %ebx, %ebx + movl 100(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 11 */ + movl 88(%ebp), %eax + xorl %ebx, %ebx + movl 92(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 10 */ + movl 80(%ebp), %eax + xorl %ebx, %ebx + movl 84(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 
0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 9 */ + movl 72(%ebp), %eax + xorl %ebx, %ebx + movl 76(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 8 */ + movl 64(%ebp), %eax + xorl %ebx, %ebx + movl 68(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 7 */ + movl 56(%ebp), %eax + xorl %ebx, %ebx + movl 60(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 6 */ + movl 48(%ebp), %eax + xorl %ebx, %ebx + movl 52(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 
0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 5 */ + movl 40(%ebp), %eax + xorl %ebx, %ebx + movl 44(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 4 */ + movl 32(%ebp), %eax + xorl %ebx, %ebx + movl 36(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 3 */ + movl 24(%ebp), %eax + xorl %ebx, %ebx + movl 28(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 2 */ + movl 16(%ebp), %eax + xorl %ebx, %ebx + movl 20(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 1 */ + movl 8(%ebp), %eax + xorl %ebx, %ebx + movl 12(%ebp), %edx + xorl %esi, %eax + xorl %esi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, 
%edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 0 */ + movl (%ebp), %eax + xorl %ebx, %ebx + movl 4(%ebp), %edx + xorl %edi, %eax + xorl %edi, %edx + andl $0xfcfcfcfc, %eax + andl $0xcfcfcfcf, %edx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 24(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi +.L003end: + + /* Fixup */ + rorl $3, %edi + movl 20(%esp), %eax + rorl $3, %esi + movl %edi, (%eax) + movl %esi, 4(%eax) + popl %ebp + popl %ebx + popl %edi + popl %esi + ret +.des_encrypt2_end: + SIZE(des_encrypt2,.des_encrypt2_end-des_encrypt2) +.ident "desasm.pl" +.text + .align ALIGN +.globl des_encrypt3 + TYPE(des_encrypt3,@function) +des_encrypt3: + pushl %ebx + movl 8(%esp), %ebx + pushl %ebp + pushl %esi + pushl %edi + + /* Load the data words */ + movl (%ebx), %edi + movl 4(%ebx), %esi + subl $12, %esp + + /* IP */ + roll $4, %edi + movl %edi, %edx + xorl %esi, %edi + andl $0xf0f0f0f0, %edi + xorl %edi, %edx + xorl %edi, %esi + + roll $20, %esi + movl %esi, %edi + xorl %edx, %esi + andl $0xfff0000f, %esi + xorl %esi, %edi + xorl %esi, %edx + + roll $14, %edi + movl %edi, %esi + xorl %edx, %edi + andl $0x33333333, %edi + xorl %edi, %esi + xorl %edi, %edx + + roll $22, %edx + movl %edx, %edi + xorl %esi, %edx + andl $0x03fc03fc, %edx + xorl %edx, %edi + xorl %edx, %esi + + roll $9, %edi + movl %edi, %edx + xorl %esi, %edi + andl $0xaaaaaaaa, %edi + xorl %edi, %edx + xorl %edi, %esi + + rorl $3, %edx + rorl $2, %esi + movl %esi, 4(%ebx) + movl 36(%esp), %eax + movl %edx, (%ebx) + movl 40(%esp), %edi + movl 44(%esp), %esi + movl $1, 8(%esp) + movl %eax, 4(%esp) + movl %ebx, (%esp) + call des_encrypt2 + movl $0, 8(%esp) + movl %edi, 4(%esp) + movl %ebx, (%esp) + call des_encrypt2 + movl $1, 8(%esp) + movl %esi, 4(%esp) + movl %ebx, (%esp) + call des_encrypt2 + addl $12, %esp + movl (%ebx), %edi + movl 4(%ebx), %esi + + /* FP */ + roll $2, %esi + roll $3, %edi + movl %edi, %eax + xorl %esi, %edi + andl $0xaaaaaaaa, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $23, %eax + movl %eax, %edi + xorl %esi, %eax + andl $0x03fc03fc, %eax + xorl %eax, %edi + xorl %eax, %esi + + roll $10, %edi + movl %edi, %eax + xorl %esi, %edi + andl $0x33333333, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $18, %esi + movl %esi, %edi + xorl %eax, %esi + andl $0xfff0000f, %esi + xorl %esi, %edi + xorl %esi, %eax + + roll $12, %edi + movl %edi, %esi + xorl %eax, %edi + andl $0xf0f0f0f0, %edi + xorl %edi, %esi + xorl %edi, %eax + + rorl $4, %eax 
+ movl %eax, (%ebx) + movl %esi, 4(%ebx) + popl %edi + popl %esi + popl %ebp + popl %ebx + ret +.des_encrypt3_end: + SIZE(des_encrypt3,.des_encrypt3_end-des_encrypt3) +.ident "desasm.pl" +.text + .align ALIGN +.globl des_decrypt3 + TYPE(des_decrypt3,@function) +des_decrypt3: + pushl %ebx + movl 8(%esp), %ebx + pushl %ebp + pushl %esi + pushl %edi + + /* Load the data words */ + movl (%ebx), %edi + movl 4(%ebx), %esi + subl $12, %esp + + /* IP */ + roll $4, %edi + movl %edi, %edx + xorl %esi, %edi + andl $0xf0f0f0f0, %edi + xorl %edi, %edx + xorl %edi, %esi + + roll $20, %esi + movl %esi, %edi + xorl %edx, %esi + andl $0xfff0000f, %esi + xorl %esi, %edi + xorl %esi, %edx + + roll $14, %edi + movl %edi, %esi + xorl %edx, %edi + andl $0x33333333, %edi + xorl %edi, %esi + xorl %edi, %edx + + roll $22, %edx + movl %edx, %edi + xorl %esi, %edx + andl $0x03fc03fc, %edx + xorl %edx, %edi + xorl %edx, %esi + + roll $9, %edi + movl %edi, %edx + xorl %esi, %edi + andl $0xaaaaaaaa, %edi + xorl %edi, %edx + xorl %edi, %esi + + rorl $3, %edx + rorl $2, %esi + movl %esi, 4(%ebx) + movl 36(%esp), %esi + movl %edx, (%ebx) + movl 40(%esp), %edi + movl 44(%esp), %eax + movl $0, 8(%esp) + movl %eax, 4(%esp) + movl %ebx, (%esp) + call des_encrypt2 + movl $1, 8(%esp) + movl %edi, 4(%esp) + movl %ebx, (%esp) + call des_encrypt2 + movl $0, 8(%esp) + movl %esi, 4(%esp) + movl %ebx, (%esp) + call des_encrypt2 + addl $12, %esp + movl (%ebx), %edi + movl 4(%ebx), %esi + + /* FP */ + roll $2, %esi + roll $3, %edi + movl %edi, %eax + xorl %esi, %edi + andl $0xaaaaaaaa, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $23, %eax + movl %eax, %edi + xorl %esi, %eax + andl $0x03fc03fc, %eax + xorl %eax, %edi + xorl %eax, %esi + + roll $10, %edi + movl %edi, %eax + xorl %esi, %edi + andl $0x33333333, %edi + xorl %edi, %eax + xorl %edi, %esi + + roll $18, %esi + movl %esi, %edi + xorl %eax, %esi + andl $0xfff0000f, %esi + xorl %esi, %edi + xorl %esi, %eax + + roll $12, %edi + movl %edi, %esi + xorl %eax, %edi + andl $0xf0f0f0f0, %edi + xorl %edi, %esi + xorl %edi, %eax + + rorl $4, %eax + movl %eax, (%ebx) + movl %esi, 4(%ebx) + popl %edi + popl %esi + popl %ebp + popl %ebx + ret +.des_decrypt3_end: + SIZE(des_decrypt3,.des_decrypt3_end-des_decrypt3) +.ident "desasm.pl" +.text + .align ALIGN +.globl des_ncbc_encrypt + TYPE(des_ncbc_encrypt,@function) +des_ncbc_encrypt: + + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 28(%esp), %ebp + /* getting iv ptr from parameter 4 */ + movl 36(%esp), %ebx + movl (%ebx), %esi + movl 4(%ebx), %edi + pushl %edi + pushl %esi + pushl %edi + pushl %esi + movl %esp, %ebx + movl 36(%esp), %esi + movl 40(%esp), %edi + /* getting encrypt flag from parameter 5 */ + movl 56(%esp), %ecx + /* get and push parameter 5 */ + pushl %ecx + /* get and push parameter 3 */ + movl 52(%esp), %eax + pushl %eax + pushl %ebx + cmpl $0, %ecx + jz .L004decrypt + andl $4294967288, %ebp + movl 12(%esp), %eax + movl 16(%esp), %ebx + jz .L005encrypt_finish +.L006encrypt_loop: + movl (%esi), %ecx + movl 4(%esi), %edx + xorl %ecx, %eax + xorl %edx, %ebx + movl %eax, 12(%esp) + movl %ebx, 16(%esp) + call des_encrypt + movl 12(%esp), %eax + movl 16(%esp), %ebx + movl %eax, (%edi) + movl %ebx, 4(%edi) + addl $8, %esi + addl $8, %edi + subl $8, %ebp + jnz .L006encrypt_loop +.L005encrypt_finish: + movl 56(%esp), %ebp + andl $7, %ebp + jz .L007finish + xorl %ecx, %ecx + xorl %edx, %edx + movl .L008cbc_enc_jmp_table(,%ebp,4),%ebp + jmp *%ebp +.L009ej7: + movb 6(%esi), %dh + sall $8, %edx +.L010ej6: + movb 5(%esi), 
%dh +.L011ej5: + movb 4(%esi), %dl +.L012ej4: + movl (%esi), %ecx + jmp .L013ejend +.L014ej3: + movb 2(%esi), %ch + sall $8, %ecx +.L015ej2: + movb 1(%esi), %ch +.L016ej1: + movb (%esi), %cl +.L013ejend: + xorl %ecx, %eax + xorl %edx, %ebx + movl %eax, 12(%esp) + movl %ebx, 16(%esp) + call des_encrypt + movl 12(%esp), %eax + movl 16(%esp), %ebx + movl %eax, (%edi) + movl %ebx, 4(%edi) + jmp .L007finish +.align ALIGN +.L004decrypt: + andl $4294967288, %ebp + movl 20(%esp), %eax + movl 24(%esp), %ebx + jz .L017decrypt_finish +.L018decrypt_loop: + movl (%esi), %eax + movl 4(%esi), %ebx + movl %eax, 12(%esp) + movl %ebx, 16(%esp) + call des_encrypt + movl 12(%esp), %eax + movl 16(%esp), %ebx + movl 20(%esp), %ecx + movl 24(%esp), %edx + xorl %eax, %ecx + xorl %ebx, %edx + movl (%esi), %eax + movl 4(%esi), %ebx + movl %ecx, (%edi) + movl %edx, 4(%edi) + movl %eax, 20(%esp) + movl %ebx, 24(%esp) + addl $8, %esi + addl $8, %edi + subl $8, %ebp + jnz .L018decrypt_loop +.L017decrypt_finish: + movl 56(%esp), %ebp + andl $7, %ebp + jz .L007finish + movl (%esi), %eax + movl 4(%esi), %ebx + movl %eax, 12(%esp) + movl %ebx, 16(%esp) + call des_encrypt + movl 12(%esp), %eax + movl 16(%esp), %ebx + movl 20(%esp), %ecx + movl 24(%esp), %edx + xorl %eax, %ecx + xorl %ebx, %edx + movl (%esi), %eax + movl 4(%esi), %ebx +.L019dj7: + rorl $16, %edx + movb %dl, 6(%edi) + shrl $16, %edx +.L020dj6: + movb %dh, 5(%edi) +.L021dj5: + movb %dl, 4(%edi) +.L022dj4: + movl %ecx, (%edi) + jmp .L023djend +.L024dj3: + rorl $16, %ecx + movb %cl, 2(%edi) + sall $16, %ecx +.L025dj2: + movb %ch, 1(%esi) +.L026dj1: + movb %cl, (%esi) +.L023djend: + jmp .L007finish +.align ALIGN +.L007finish: + movl 64(%esp), %ecx + addl $28, %esp + movl %eax, (%ecx) + movl %ebx, 4(%ecx) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.align ALIGN +.L008cbc_enc_jmp_table: + .long 0 + .long .L016ej1 + .long .L015ej2 + .long .L014ej3 + .long .L012ej4 + .long .L011ej5 + .long .L010ej6 + .long .L009ej7 +.align ALIGN +.L027cbc_dec_jmp_table: + .long 0 + .long .L026dj1 + .long .L025dj2 + .long .L024dj3 + .long .L022dj4 + .long .L021dj5 + .long .L020dj6 + .long .L019dj7 +.des_ncbc_encrypt_end: + SIZE(des_ncbc_encrypt,.des_ncbc_encrypt_end-des_ncbc_encrypt) +.ident "desasm.pl" +.text + .align ALIGN +.globl des_ede3_cbc_encrypt + TYPE(des_ede3_cbc_encrypt,@function) +des_ede3_cbc_encrypt: + + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 28(%esp), %ebp + /* getting iv ptr from parameter 6 */ + movl 44(%esp), %ebx + movl (%ebx), %esi + movl 4(%ebx), %edi + pushl %edi + pushl %esi + pushl %edi + pushl %esi + movl %esp, %ebx + movl 36(%esp), %esi + movl 40(%esp), %edi + /* getting encrypt flag from parameter 7 */ + movl 64(%esp), %ecx + /* get and push parameter 5 */ + movl 56(%esp), %eax + pushl %eax + /* get and push parameter 4 */ + movl 56(%esp), %eax + pushl %eax + /* get and push parameter 3 */ + movl 56(%esp), %eax + pushl %eax + pushl %ebx + cmpl $0, %ecx + jz .L028decrypt + andl $4294967288, %ebp + movl 16(%esp), %eax + movl 20(%esp), %ebx + jz .L029encrypt_finish +.L030encrypt_loop: + movl (%esi), %ecx + movl 4(%esi), %edx + xorl %ecx, %eax + xorl %edx, %ebx + movl %eax, 16(%esp) + movl %ebx, 20(%esp) + call des_encrypt3 + movl 16(%esp), %eax + movl 20(%esp), %ebx + movl %eax, (%edi) + movl %ebx, 4(%edi) + addl $8, %esi + addl $8, %edi + subl $8, %ebp + jnz .L030encrypt_loop +.L029encrypt_finish: + movl 60(%esp), %ebp + andl $7, %ebp + jz .L031finish + xorl %ecx, %ecx + xorl %edx, %edx + movl .L032cbc_enc_jmp_table(,%ebp,4),%ebp + 
jmp *%ebp +.L033ej7: + movb 6(%esi), %dh + sall $8, %edx +.L034ej6: + movb 5(%esi), %dh +.L035ej5: + movb 4(%esi), %dl +.L036ej4: + movl (%esi), %ecx + jmp .L037ejend +.L038ej3: + movb 2(%esi), %ch + sall $8, %ecx +.L039ej2: + movb 1(%esi), %ch +.L040ej1: + movb (%esi), %cl +.L037ejend: + xorl %ecx, %eax + xorl %edx, %ebx + movl %eax, 16(%esp) + movl %ebx, 20(%esp) + call des_encrypt3 + movl 16(%esp), %eax + movl 20(%esp), %ebx + movl %eax, (%edi) + movl %ebx, 4(%edi) + jmp .L031finish +.align ALIGN +.L028decrypt: + andl $4294967288, %ebp + movl 24(%esp), %eax + movl 28(%esp), %ebx + jz .L041decrypt_finish +.L042decrypt_loop: + movl (%esi), %eax + movl 4(%esi), %ebx + movl %eax, 16(%esp) + movl %ebx, 20(%esp) + call des_decrypt3 + movl 16(%esp), %eax + movl 20(%esp), %ebx + movl 24(%esp), %ecx + movl 28(%esp), %edx + xorl %eax, %ecx + xorl %ebx, %edx + movl (%esi), %eax + movl 4(%esi), %ebx + movl %ecx, (%edi) + movl %edx, 4(%edi) + movl %eax, 24(%esp) + movl %ebx, 28(%esp) + addl $8, %esi + addl $8, %edi + subl $8, %ebp + jnz .L042decrypt_loop +.L041decrypt_finish: + movl 60(%esp), %ebp + andl $7, %ebp + jz .L031finish + movl (%esi), %eax + movl 4(%esi), %ebx + movl %eax, 16(%esp) + movl %ebx, 20(%esp) + call des_decrypt3 + movl 16(%esp), %eax + movl 20(%esp), %ebx + movl 24(%esp), %ecx + movl 28(%esp), %edx + xorl %eax, %ecx + xorl %ebx, %edx + movl (%esi), %eax + movl 4(%esi), %ebx +.L043dj7: + rorl $16, %edx + movb %dl, 6(%edi) + shrl $16, %edx +.L044dj6: + movb %dh, 5(%edi) +.L045dj5: + movb %dl, 4(%edi) +.L046dj4: + movl %ecx, (%edi) + jmp .L047djend +.L048dj3: + rorl $16, %ecx + movb %cl, 2(%edi) + sall $16, %ecx +.L049dj2: + movb %ch, 1(%esi) +.L050dj1: + movb %cl, (%esi) +.L047djend: + jmp .L031finish +.align ALIGN +.L031finish: + movl 76(%esp), %ecx + addl $32, %esp + movl %eax, (%ecx) + movl %ebx, 4(%ecx) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.align ALIGN +.L032cbc_enc_jmp_table: + .long 0 + .long .L040ej1 + .long .L039ej2 + .long .L038ej3 + .long .L036ej4 + .long .L035ej5 + .long .L034ej6 + .long .L033ej7 +.align ALIGN +.L051cbc_dec_jmp_table: + .long 0 + .long .L050dj1 + .long .L049dj2 + .long .L048dj3 + .long .L046dj4 + .long .L045dj5 + .long .L044dj6 + .long .L043dj7 +.des_ede3_cbc_encrypt_end: + SIZE(des_ede3_cbc_encrypt,.des_ede3_cbc_encrypt_end-des_ede3_cbc_encrypt) +.ident "desasm.pl" diff --git a/src/lib/libcrypto/des/asm/y-win32.asm b/src/lib/libcrypto/des/asm/y-win32.asm new file mode 100644 index 0000000000..af5c102422 --- /dev/null +++ b/src/lib/libcrypto/des/asm/y-win32.asm @@ -0,0 +1,929 @@ + ; Don't even think of reading this code + ; It was automatically generated by crypt586.pl + ; Which is a perl program used to generate the x86 assember for + ; any of elf, a.out, BSDI,Win32, or Solaris + ; eric + ; + TITLE crypt586.asm + .386 +.model FLAT +_TEXT SEGMENT +PUBLIC _fcrypt_body +EXTRN _des_SPtrans:DWORD +_fcrypt_body PROC NEAR + push ebp + push ebx + push esi + push edi + ; + ; Load the 2 words + xor edi, edi + xor esi, esi + mov ebp, DWORD PTR 24[esp] + push 25 +L000start: + ; + ; Round 0 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR [ebp] + xor eax, ebx + mov ecx, DWORD PTR 4[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD 
PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 1 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 8[ebp] + xor eax, ebx + mov ecx, DWORD PTR 12[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 2 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 16[ebp] + xor eax, ebx + mov ecx, DWORD PTR 20[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 3 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 24[ebp] + xor eax, ebx + mov ecx, DWORD PTR 28[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 
28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 4 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 32[ebp] + xor eax, ebx + mov ecx, DWORD PTR 36[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 5 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 40[ebp] + xor eax, ebx + mov ecx, DWORD PTR 44[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 6 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 48[ebp] + xor eax, ebx + mov ecx, DWORD PTR 52[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 7 
+ mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 56[ebp] + xor eax, ebx + mov ecx, DWORD PTR 60[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 8 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 64[ebp] + xor eax, ebx + mov ecx, DWORD PTR 68[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 9 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 72[ebp] + xor eax, ebx + mov ecx, DWORD PTR 76[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 10 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 80[ebp] + xor eax, ebx + mov ecx, DWORD PTR 84[ebp] + xor eax, esi 
+ xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 11 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 88[ebp] + xor eax, ebx + mov ecx, DWORD PTR 92[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 12 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 96[ebp] + xor eax, ebx + mov ecx, DWORD PTR 100[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 13 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 104[ebp] + xor eax, ebx + mov ecx, DWORD PTR 108[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov 
ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + ; + ; Round 14 + mov eax, DWORD PTR 32[esp] + mov edx, esi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, esi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 112[ebp] + xor eax, ebx + mov ecx, DWORD PTR 116[ebp] + xor eax, esi + xor edx, esi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor edi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor edi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor edi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor edi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor edi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor edi, ebx + ; + ; Round 15 + mov eax, DWORD PTR 32[esp] + mov edx, edi + shr edx, 16 + mov ecx, DWORD PTR 36[esp] + xor edx, edi + and eax, edx + and edx, ecx + mov ebx, eax + shl ebx, 16 + mov ecx, edx + shl ecx, 16 + xor eax, ebx + xor edx, ecx + mov ebx, DWORD PTR 120[ebp] + xor eax, ebx + mov ecx, DWORD PTR 124[ebp] + xor eax, edi + xor edx, edi + xor edx, ecx + and eax, 0fcfcfcfch + xor ebx, ebx + and edx, 0cfcfcfcfh + xor ecx, ecx + mov bl, al + mov cl, ah + ror edx, 4 + mov ebp, DWORD PTR _des_SPtrans[ebx] + mov bl, dl + xor esi, ebp + mov ebp, DWORD PTR _des_SPtrans[0200h+ecx] + xor esi, ebp + mov cl, dh + shr eax, 16 + mov ebp, DWORD PTR _des_SPtrans[0100h+ebx] + xor esi, ebp + mov bl, ah + shr edx, 16 + mov ebp, DWORD PTR _des_SPtrans[0300h+ecx] + xor esi, ebp + mov ebp, DWORD PTR 28[esp] + mov cl, dh + and eax, 0ffh + and edx, 0ffh + mov ebx, DWORD PTR _des_SPtrans[0600h+ebx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0700h+ecx] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0400h+eax] + xor esi, ebx + mov ebx, DWORD PTR _des_SPtrans[0500h+edx] + xor esi, ebx + mov ebx, DWORD PTR [esp] + mov eax, edi + dec ebx + mov edi, esi + mov esi, eax + mov DWORD PTR [esp],ebx + jnz L000start + ; + ; FP + mov edx, DWORD PTR 24[esp] + ror edi, 1 + mov eax, esi + xor esi, edi + and esi, 0aaaaaaaah + xor eax, esi + xor edi, esi + ; + rol eax, 23 + mov esi, eax + xor eax, edi + and eax, 003fc03fch + xor esi, eax + xor edi, eax + ; + rol esi, 10 + mov eax, esi + xor esi, edi + and esi, 033333333h + xor eax, esi + xor edi, esi + ; + rol edi, 18 + mov esi, edi + xor edi, eax + and edi, 0fff0000fh + xor esi, edi + xor eax, edi + ; + rol esi, 12 + mov edi, esi + xor esi, eax + and esi, 0f0f0f0f0h + xor edi, esi + xor eax, esi + ; + ror eax, 4 + mov DWORD PTR [edx],eax + mov DWORD PTR 4[edx],edi + pop ecx + pop edi + pop esi + pop ebx + pop ebp + ret +_fcrypt_body ENDP +_TEXT ENDS +END diff --git a/src/lib/libcrypto/des/asm/yx86unix.cpp b/src/lib/libcrypto/des/asm/yx86unix.cpp new file mode 
100644 index 0000000000..8719e38607 --- /dev/null +++ b/src/lib/libcrypto/des/asm/yx86unix.cpp @@ -0,0 +1,976 @@ +/* Run the C pre-processor over this file with one of the following defined + * ELF - elf object files, + * OUT - a.out object files, + * BSDI - BSDI style a.out object files + * SOL - Solaris style elf + */ + +#define TYPE(a,b) .type a,b +#define SIZE(a,b) .size a,b + +#if defined(OUT) || defined(BSDI) +#define des_SPtrans _des_SPtrans +#define fcrypt_body _fcrypt_body + +#endif + +#ifdef OUT +#define OK 1 +#define ALIGN 4 +#endif + +#ifdef BSDI +#define OK 1 +#define ALIGN 4 +#undef SIZE +#undef TYPE +#define SIZE(a,b) +#define TYPE(a,b) +#endif + +#if defined(ELF) || defined(SOL) +#define OK 1 +#define ALIGN 16 +#endif + +#ifndef OK +You need to define one of +ELF - elf systems - linux-elf, NetBSD and DG-UX +OUT - a.out systems - linux-a.out and FreeBSD +SOL - solaris systems, which are elf with strange comment lines +BSDI - a.out with a very primative version of as. +#endif + +/* Let the Assembler begin :-) */ + /* Don't even think of reading this code */ + /* It was automatically generated by crypt586.pl */ + /* Which is a perl program used to generate the x86 assember for */ + /* any of elf, a.out, BSDI,Win32, or Solaris */ + /* eric */ + + .file "crypt586.s" + .version "01.01" +gcc2_compiled.: +.text + .align ALIGN +.globl fcrypt_body + TYPE(fcrypt_body,@function) +fcrypt_body: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + + /* Load the 2 words */ + xorl %edi, %edi + xorl %esi, %esi + movl 24(%esp), %ebp + pushl $25 +.L000start: + + /* Round 0 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl (%ebp), %ebx + xorl %ebx, %eax + movl 4(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 1 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 8(%ebp), %ebx + xorl %ebx, %eax + movl 12(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 
0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 2 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 16(%ebp), %ebx + xorl %ebx, %eax + movl 20(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 3 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 24(%ebp), %ebx + xorl %ebx, %eax + movl 28(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 4 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 32(%ebp), %ebx + xorl %ebx, %eax + movl 36(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 5 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, 
%edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 40(%ebp), %ebx + xorl %ebx, %eax + movl 44(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 6 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 48(%ebp), %ebx + xorl %ebx, %eax + movl 52(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 7 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 56(%ebp), %ebx + xorl %ebx, %eax + movl 60(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 8 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 64(%ebp), %ebx + xorl %ebx, %eax + movl 68(%ebp), %ecx + xorl %esi, %eax + xorl %esi, 
%edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 9 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 72(%ebp), %ebx + xorl %ebx, %eax + movl 76(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 10 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 80(%ebp), %ebx + xorl %ebx, %eax + movl 84(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 11 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 88(%ebp), %ebx + xorl %ebx, %eax + movl 92(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + 
movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 12 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 96(%ebp), %ebx + xorl %ebx, %eax + movl 100(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 13 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 104(%ebp), %ebx + xorl %ebx, %eax + movl 108(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + + /* Round 14 */ + movl 32(%esp), %eax + movl %esi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %esi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 112(%ebp), %ebx + xorl %ebx, %eax + movl 116(%ebp), %ecx + xorl %esi, %eax + xorl %esi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %edi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %edi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %edi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 
0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %edi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %edi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %edi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %edi + + /* Round 15 */ + movl 32(%esp), %eax + movl %edi, %edx + shrl $16, %edx + movl 36(%esp), %ecx + xorl %edi, %edx + andl %edx, %eax + andl %ecx, %edx + movl %eax, %ebx + sall $16, %ebx + movl %edx, %ecx + sall $16, %ecx + xorl %ebx, %eax + xorl %ecx, %edx + movl 120(%ebp), %ebx + xorl %ebx, %eax + movl 124(%ebp), %ecx + xorl %edi, %eax + xorl %edi, %edx + xorl %ecx, %edx + andl $0xfcfcfcfc, %eax + xorl %ebx, %ebx + andl $0xcfcfcfcf, %edx + xorl %ecx, %ecx + movb %al, %bl + movb %ah, %cl + rorl $4, %edx + movl des_SPtrans(%ebx),%ebp + movb %dl, %bl + xorl %ebp, %esi + movl 0x200+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movb %dh, %cl + shrl $16, %eax + movl 0x100+des_SPtrans(%ebx),%ebp + xorl %ebp, %esi + movb %ah, %bl + shrl $16, %edx + movl 0x300+des_SPtrans(%ecx),%ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + movb %dh, %cl + andl $0xff, %eax + andl $0xff, %edx + movl 0x600+des_SPtrans(%ebx),%ebx + xorl %ebx, %esi + movl 0x700+des_SPtrans(%ecx),%ebx + xorl %ebx, %esi + movl 0x400+des_SPtrans(%eax),%ebx + xorl %ebx, %esi + movl 0x500+des_SPtrans(%edx),%ebx + xorl %ebx, %esi + movl (%esp), %ebx + movl %edi, %eax + decl %ebx + movl %esi, %edi + movl %eax, %esi + movl %ebx, (%esp) + jnz .L000start + + /* FP */ + movl 24(%esp), %edx +.byte 209 +.byte 207 /* rorl $1 %edi */ + movl %esi, %eax + xorl %edi, %esi + andl $0xaaaaaaaa, %esi + xorl %esi, %eax + xorl %esi, %edi + + roll $23, %eax + movl %eax, %esi + xorl %edi, %eax + andl $0x03fc03fc, %eax + xorl %eax, %esi + xorl %eax, %edi + + roll $10, %esi + movl %esi, %eax + xorl %edi, %esi + andl $0x33333333, %esi + xorl %esi, %eax + xorl %esi, %edi + + roll $18, %edi + movl %edi, %esi + xorl %eax, %edi + andl $0xfff0000f, %edi + xorl %edi, %esi + xorl %edi, %eax + + roll $12, %esi + movl %esi, %edi + xorl %eax, %esi + andl $0xf0f0f0f0, %esi + xorl %esi, %edi + xorl %esi, %eax + + rorl $4, %eax + movl %eax, (%edx) + movl %edi, 4(%edx) + popl %ecx + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.fcrypt_body_end: + SIZE(fcrypt_body,.fcrypt_body_end-fcrypt_body) +.ident "fcrypt_body" -- cgit v1.2.3-55-g6feb
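
The IP and FP blocks in des_encrypt3, des_decrypt3 and fcrypt_body above are all built from one primitive: rotate one 32-bit half, then exchange the bits selected by a constant mask between the two halves (t = (a ^ b) & mask; a ^= t; b ^= t). Below is a minimal C sketch that transcribes the first two rotate-and-swap groups of the IP block as they appear in the generated assembly; it is illustrative only, the rotl32/ip_first_steps names are invented here and are not part of libcrypto, and the remaining groups follow the same pattern with the masks 0x33333333, 0x03fc03fc and 0xaaaaaaaa seen above.

#include <stdint.h>

/* Rotate a 32-bit word left by n (0 < n < 32). */
static uint32_t
rotl32(uint32_t v, int n)
{
	return (v << n) | (v >> (32 - n));
}

/* First two rotate-and-swap groups of the IP block, as in the assembly:
 * roll $4 with mask 0xf0f0f0f0, then roll $20 with mask 0xfff0000f. */
static void
ip_first_steps(uint32_t *lp, uint32_t *rp)
{
	uint32_t a = *lp, b = *rp, t;

	a = rotl32(a, 4);
	t = (a ^ b) & 0xf0f0f0f0u;	/* bits to exchange between halves */
	a ^= t;
	b ^= t;

	b = rotl32(b, 20);
	t = (b ^ a) & 0xfff0000fu;
	b ^= t;
	a ^= t;

	*lp = a;
	*rp = b;
}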
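
des_ncbc_encrypt and des_ede3_cbc_encrypt above round the length down to a multiple of 8 (andl $4294967288, i.e. ~7), loop over whole 8-byte blocks, and then use a computed jump table (cbc_enc_jmp_table) to gather the remaining length & 7 input bytes into the two 32-bit data halves before the final encryption. The C sketch below mirrors the ej7..ej1 targets of that table; the function name and prototype are invented for illustration, and the 4-byte case assumes the little-endian load that the movl in the assembly performs.

#include <stdint.h>

/* Gather the trailing (len & 7) input bytes into the two data halves,
 * mirroring the ej7..ej1 jump-table targets in the assembly above. */
static void
gather_cbc_tail(const unsigned char *in, int remain, uint32_t *c, uint32_t *d)
{
	*c = 0;
	*d = 0;
	switch (remain) {
	case 7:
		*d |= (uint32_t)in[6] << 16;	/* movb 6(%esi),%dh; sall $8 */
		/* FALLTHROUGH */
	case 6:
		*d |= (uint32_t)in[5] << 8;	/* movb 5(%esi),%dh */
		/* FALLTHROUGH */
	case 5:
		*d |= (uint32_t)in[4];		/* movb 4(%esi),%dl */
		/* FALLTHROUGH */
	case 4:
		*c = (uint32_t)in[0] | (uint32_t)in[1] << 8 |
		    (uint32_t)in[2] << 16 | (uint32_t)in[3] << 24;
		break;				/* movl (%esi),%ecx */
	case 3:
		*c |= (uint32_t)in[2] << 16;	/* movb 2(%esi),%ch; sall $8 */
		/* FALLTHROUGH */
	case 2:
		*c |= (uint32_t)in[1] << 8;	/* movb 1(%esi),%ch */
		/* FALLTHROUGH */
	case 1:
		*c |= (uint32_t)in[0];		/* movb (%esi),%cl */
		break;
	}
}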