Index: crypto/openssl/crypto/x86_64cpuid.pl
===================================================================
--- crypto/openssl/crypto/x86_64cpuid.pl (revision 222101)
+++ crypto/openssl/crypto/x86_64cpuid.pl (working copy)
@@ -94,9 +94,13 @@ OPENSSL_wipe_cpu:
 	ret
 .size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu

+.extern OPENSSL_cpuid_setup
+.hidden OPENSSL_cpuid_setup
 .section .init
 	call OPENSSL_cpuid_setup

+.hidden OPENSSL_ia32cap_P
+.comm OPENSSL_ia32cap_P,8
 ___

 open STDOUT,"| $^X perlasm/x86_64-xlate.pl $output";
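The x86_64cpuid.pl hunk above only touches symbol handling: OPENSSL_cpuid_setup and OPENSSL_ia32cap_P are marked .hidden so they stay internal to libcrypto's shared object, and the capability word gets an 8-byte .comm definition next to the .init-section call that already invokes OPENSSL_cpuid_setup at load time. That routine stores CPUID feature bits into OPENSSL_ia32cap_P, and the assembler added elsewhere in this patch tests those bits before taking SIMD paths (the new i386 bn_mul_add_words code checks bit 26, SSE2, with "btl $26"). The fragment below is an illustrative sketch only, not part of the patch: it shows how C code linked against such a libcrypto could consult the same bit. The extern declaration and its type are assumptions made for the example (the patch allocates 8 bytes for the symbol on amd64 and 4 on i386).

/* Sketch only; the declaration is an assumption, not part of the patch. */
#include <stdio.h>

extern unsigned int OPENSSL_ia32cap_P;    /* filled in by OPENSSL_cpuid_setup() at load time */

static int have_sse2(void)
{
	/* CPUID(1).EDX bit 26 = SSE2, the same bit the i386 bn code tests with "btl $26" */
	return (OPENSSL_ia32cap_P >> 26) & 1;
}

int main(void)
{
	printf("SSE2 code paths %s\n", have_sse2() ? "available" : "not available");
	return 0;
}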
Index: secure/lib/libcrypto/opensslconf-ia64.h
===================================================================
--- secure/lib/libcrypto/opensslconf-ia64.h (revision 222101)
+++ secure/lib/libcrypto/opensslconf-ia64.h (working copy)
@@ -5,35 +5,37 @@
 /* OpenSSL was configured with the following options: */
 #ifndef OPENSSL_DOING_MAKEDEPEND
-/* Disabled by default in OpenSSL 0.9.8. */
+
 #ifndef OPENSSL_NO_CAMELLIA
 # define OPENSSL_NO_CAMELLIA
 #endif
-/* Disabled by default in OpenSSL 0.9.8. */
+#ifndef OPENSSL_NO_CAPIENG
+# define OPENSSL_NO_CAPIENG
+#endif
 #ifndef OPENSSL_NO_CMS
 # define OPENSSL_NO_CMS
 #endif
-/* Disabled by default in OpenSSL 0.9.8. */
-#ifndef OPENSSL_NO_SEED
-# define OPENSSL_NO_SEED
+#ifndef OPENSSL_NO_GMP
+# define OPENSSL_NO_GMP
 #endif
-/* jpake is marked experimental in OpenSSL 0.9.8. */
 #ifndef OPENSSL_NO_JPAKE
 # define OPENSSL_NO_JPAKE
 #endif
-/* libgmp is not in the FreeBSD base system. */
-#ifndef OPENSSL_NO_GMP
-# define OPENSSL_NO_GMP
-#endif
-/* The Kerberos 5 support is MIT-specific. */
 #ifndef OPENSSL_NO_KRB5
 # define OPENSSL_NO_KRB5
 #endif
+#ifndef OPENSSL_NO_SEED
+# define OPENSSL_NO_SEED
+#endif
 #endif /* OPENSSL_DOING_MAKEDEPEND */
+
 #ifndef OPENSSL_THREADS
 # define OPENSSL_THREADS
 #endif
+#ifndef OPENSSL_NO_ASM
+# define OPENSSL_NO_ASM
+#endif
 #ifndef OPENSSL_NO_STATIC_ENGINE
 # define OPENSSL_NO_STATIC_ENGINE
 #endif
@@ -43,21 +45,46 @@
    who haven't had the time to do the appropriate changes in their
    applications. */
 #ifdef OPENSSL_ALGORITHM_DEFINES
+# if defined(OPENSSL_NO_CAMELLIA) && !defined(NO_CAMELLIA)
+# define NO_CAMELLIA
+# endif
+# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG)
+# define NO_CAPIENG
+# endif
+# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS)
+# define NO_CMS
+# endif
 # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP)
 # define NO_GMP
 # endif
+# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE)
+# define NO_JPAKE
+# endif
 # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5)
 # define NO_KRB5
 # endif
+# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED)
+# define NO_SEED
 # endif
-#ifdef OPENSSL_OTHER_DEFINES
-# ifndef NO_ASM
-# define NO_ASM
-# endif
 #endif
 /* crypto/opensslconf.h.in */
+#ifdef OPENSSL_DOING_MAKEDEPEND
+
+/* Include any symbols here that have to be explicitly set to enable a feature
+ * that should be visible to makedepend.
+ *
+ * [Our "make depend" doesn't actually look at this, we use actual build settings
+ * instead; we want to make it easy to remove subdirectories with disabled algorithms.]
+ */
+
+#ifndef OPENSSL_FIPS
+#define OPENSSL_FIPS
+#endif
+
+#endif
+
 /* Generate 80386 code? */
 #undef I386_ONLY
@@ -102,7 +129,7 @@
  * This enables code handling data aligned at natural CPU word
  * boundary. See crypto/rc4/rc4_enc.c for further details.
  */
-#undef RC4_CHUNK
+#define RC4_CHUNK unsigned long
 #endif
 #endif
@@ -110,7 +137,7 @@
 /* If this is set to 'unsigned int' on a DEC Alpha, this gives about a
  * %20 speed up (longs are 8 bytes, int's are 4). */
 #ifndef DES_LONG
-#define DES_LONG unsigned int
+#define DES_LONG unsigned long
 #endif
 #endif
@@ -149,7 +176,7 @@
 /* the following is tweaked from a config script, that is why it is a
  * protected undef/define */
 #ifndef DES_PTR
-#define DES_PTR
+#undef DES_PTR
 #endif
 /* This helps C compiler generate the correct code for multiple functional
@@ -160,7 +187,7 @@
 #endif
 #ifndef DES_RISC2
-#define DES_RISC2
+#undef DES_RISC2
 #endif
 #if defined(DES_RISC1) && defined(DES_RISC2)
@@ -206,7 +233,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D
 # define DES_PTR
 # define DES_RISC2
 # define DES_UNROLL
-#elif defined( i386 ) /* x86 boxes, should be gcc */
+#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */
 # define DES_PTR
 # define DES_RISC1
 # define DES_UNROLL
Index: secure/lib/libcrypto/opensslconf-mips.h
===================================================================
--- secure/lib/libcrypto/opensslconf-mips.h (revision 222101)
+++ secure/lib/libcrypto/opensslconf-mips.h (working copy)
@@ -5,35 +5,37 @@
 /* OpenSSL was configured with the following options: */
 #ifndef OPENSSL_DOING_MAKEDEPEND
-/* Disabled by default in OpenSSL 0.9.8. */
+
 #ifndef OPENSSL_NO_CAMELLIA
 # define OPENSSL_NO_CAMELLIA
 #endif
-/* Disabled by default in OpenSSL 0.9.8. */
+#ifndef OPENSSL_NO_CAPIENG
+# define OPENSSL_NO_CAPIENG
+#endif
 #ifndef OPENSSL_NO_CMS
 # define OPENSSL_NO_CMS
 #endif
-/* Disabled by default in OpenSSL 0.9.8. */
-#ifndef OPENSSL_NO_SEED
-# define OPENSSL_NO_SEED
+#ifndef OPENSSL_NO_GMP
+# define OPENSSL_NO_GMP
 #endif
-/* jpake is marked experimental in OpenSSL 0.9.8. */
 #ifndef OPENSSL_NO_JPAKE
 # define OPENSSL_NO_JPAKE
 #endif
-/* libgmp is not in the FreeBSD base system. */
-#ifndef OPENSSL_NO_GMP
-# define OPENSSL_NO_GMP
-#endif
-/* The Kerberos 5 support is MIT-specific. */
 #ifndef OPENSSL_NO_KRB5
 # define OPENSSL_NO_KRB5
 #endif
+#ifndef OPENSSL_NO_SEED
+# define OPENSSL_NO_SEED
+#endif
 #endif /* OPENSSL_DOING_MAKEDEPEND */
+
 #ifndef OPENSSL_THREADS
 # define OPENSSL_THREADS
 #endif
+#ifndef OPENSSL_NO_ASM
+# define OPENSSL_NO_ASM
+#endif
 #ifndef OPENSSL_NO_STATIC_ENGINE
 # define OPENSSL_NO_STATIC_ENGINE
 #endif
@@ -43,21 +45,46 @@
    who haven't had the time to do the appropriate changes in their
    applications. */
 #ifdef OPENSSL_ALGORITHM_DEFINES
+# if defined(OPENSSL_NO_CAMELLIA) && !defined(NO_CAMELLIA)
+# define NO_CAMELLIA
+# endif
+# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG)
+# define NO_CAPIENG
+# endif
+# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS)
+# define NO_CMS
+# endif
 # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP)
 # define NO_GMP
 # endif
+# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE)
+# define NO_JPAKE
+# endif
 # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5)
 # define NO_KRB5
 # endif
+# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED)
+# define NO_SEED
 # endif
-#ifdef OPENSSL_OTHER_DEFINES
-# ifndef NO_ASM
-# define NO_ASM
-# endif
 #endif
 /* crypto/opensslconf.h.in */
+#ifdef OPENSSL_DOING_MAKEDEPEND
+
+/* Include any symbols here that have to be explicitly set to enable a feature
+ * that should be visible to makedepend.
+ *
+ * [Our "make depend" doesn't actually look at this, we use actual build settings
+ * instead; we want to make it easy to remove subdirectories with disabled algorithms.]
+ */
+
+#ifndef OPENSSL_FIPS
+#define OPENSSL_FIPS
+#endif
+
+#endif
+
 /* Generate 80386 code? */
 #undef I386_ONLY
@@ -220,7 +247,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D
 # define DES_PTR
 # define DES_RISC2
 # define DES_UNROLL
-#elif defined( i386 ) /* x86 boxes, should be gcc */
+#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */
 # define DES_PTR
 # define DES_RISC1
 # define DES_UNROLL
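Both the ia64 and mips headers above now spell out OPENSSL_NO_ASM (those platforms keep the portable C implementations) and drop the old OPENSSL_OTHER_DEFINES/NO_ASM fallback, besides adding OPENSSL_NO_CAPIENG, re-sorting the OPENSSL_NO_* list, and introducing an OPENSSL_FIPS block under OPENSSL_DOING_MAKEDEPEND. The Makefile.inc and opensslconf-amd64.h hunks that follow go the other way for x86: they define OPENSSL_CPUID_OBJ, AES_ASM and the MD5/SHA *_ASM macros, plus OPENSSL_BN_ASM_MONT on amd64 and OPENSSL_BN_ASM_PART_WORDS/OPENSSL_IA32_SSE2 on i386, so the hand-written assembler replaces the C code there. The snippet below is a sketch, not part of the patch; it shows the usual way application code consumes the per-architecture OPENSSL_NO_* switches from opensslconf.h, using Camellia (disabled in the ia64 and mips headers above) as the example.

/* Sketch only: guarding an optional algorithm with the opensslconf.h switches. */
#include <stdio.h>
#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_CAMELLIA
# include <openssl/camellia.h>
#endif

int main(void)
{
#ifdef OPENSSL_NO_CAMELLIA
	printf("Camellia is compiled out of this libcrypto\n");
#else
	printf("Camellia block size: %d bytes\n", CAMELLIA_BLOCK_SIZE);
#endif
	return 0;
}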
Index: secure/lib/libcrypto/Makefile.inc
===================================================================
--- secure/lib/libcrypto/Makefile.inc (revision 222101)
+++ secure/lib/libcrypto/Makefile.inc (working copy)
@@ -17,8 +17,14 @@ CFLAGS+= -DOPENSSL_THREADS -DDSO_DLFCN -DHAVE_DLFC
 CFLAGS+= -DOPENSSL_NO_IDEA
 .endif

-.if ${MACHINE_ARCH} == "i386" || ${MACHINE_ARCH} == "amd64"
-CFLAGS+= -DL_ENDIAN
+.if ${MACHINE_CPUARCH} == "amd64"
+CFLAGS+=-DL_ENDIAN
+CFLAGS+=-DOPENSSL_BN_ASM_MONT
+CFLAGS+=-DAES_ASM -DMD5_ASM -DSHA1_ASM -DSHA256_ASM -DSHA512_ASM
+.elif ${MACHINE_CPUARCH} == "i386"
+CFLAGS+=-DL_ENDIAN
+CFLAGS+=-DOPENSSL_BN_ASM_PART_WORDS -DOPENSSL_IA32_SSE2
+CFLAGS+=-DAES_ASM -DMD5_ASM -DRMD160_ASM -DSHA1_ASM
 .endif

 MANDIR= ${SHAREDIR}/openssl/man/man
Index: secure/lib/libcrypto/opensslconf-amd64.h
===================================================================
--- secure/lib/libcrypto/opensslconf-amd64.h (revision 222101)
+++ secure/lib/libcrypto/opensslconf-amd64.h (working copy)
@@ -5,28 +5,28 @@
 /* OpenSSL was configured with the following options: */
 #ifndef OPENSSL_DOING_MAKEDEPEND
-/* Disabled by default in OpenSSL 0.9.8. */
+
+#ifndef OPENSSL_NO_CAPIENG
+# define OPENSSL_NO_CAPIENG
+#endif
 #ifndef OPENSSL_NO_CMS
 # define OPENSSL_NO_CMS
 #endif
-/* Disabled by default in OpenSSL 0.9.8. */
-#ifndef OPENSSL_NO_SEED
-# define OPENSSL_NO_SEED
-#endif
-/* libgmp is not in the FreeBSD base system. */
 #ifndef OPENSSL_NO_GMP
 # define OPENSSL_NO_GMP
 #endif
-/* jpake is marked experimental in OpenSSL 0.9.8. */
 #ifndef OPENSSL_NO_JPAKE
 # define OPENSSL_NO_JPAKE
 #endif
-/* The Kerberos 5 support is MIT-specific. */
 #ifndef OPENSSL_NO_KRB5
 # define OPENSSL_NO_KRB5
 #endif
+#ifndef OPENSSL_NO_SEED
+# define OPENSSL_NO_SEED
+#endif
 #endif /* OPENSSL_DOING_MAKEDEPEND */
+
 #ifndef OPENSSL_THREADS
 # define OPENSSL_THREADS
 #endif
@@ -39,21 +39,45 @@
    who haven't had the time to do the appropriate changes in their
    applications. */
 #ifdef OPENSSL_ALGORITHM_DEFINES
+# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG)
+# define NO_CAPIENG
+# endif
+# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS)
+# define NO_CMS
+# endif
 # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP)
 # define NO_GMP
 # endif
+# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE)
+# define NO_JPAKE
+# endif
 # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5)
 # define NO_KRB5
 # endif
+# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED)
+# define NO_SEED
 # endif
-#ifdef OPENSSL_OTHER_DEFINES
-# ifndef NO_ASM
-# define NO_ASM
-# endif
 #endif
+#define OPENSSL_CPUID_OBJ
+
 /* crypto/opensslconf.h.in */
+#ifdef OPENSSL_DOING_MAKEDEPEND
+
+/* Include any symbols here that have to be explicitly set to enable a feature
+ * that should be visible to makedepend.
+ *
+ * [Our "make depend" doesn't actually look at this, we use actual build settings
+ * instead; we want to make it easy to remove subdirectories with disabled algorithms.]
+ */
+
+#ifndef OPENSSL_FIPS
+#define OPENSSL_FIPS
+#endif
+
+#endif
+
 /* Generate 80386 code?
*/ #undef I386_ONLY @@ -202,7 +226,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D # define DES_PTR # define DES_RISC2 # define DES_UNROLL -#elif defined( i386 ) /* x86 boxes, should be gcc */ +#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */ # define DES_PTR # define DES_RISC1 # define DES_UNROLL Index: secure/lib/libcrypto/i386/bn-586.s =================================================================== --- secure/lib/libcrypto/i386/bn-586.s (revision 222101) +++ secure/lib/libcrypto/i386/bn-586.s (working copy) @@ -5,13 +5,11 @@ - .file "/usr/src/secure/lib/libcrypto/../../../crypto/openssl/crypto/bn/asm/bn-586.s" - .version "01.01" -gcc2_compiled.: + .file "bn-586.s" .text - .align 16 -.globl bn_mul_add_words - .type bn_mul_add_words,@function +.globl bn_mul_add_words +.type bn_mul_add_words,@function +.align 16 bn_mul_add_words: pushl %ebp pushl %ebx @@ -27,6 +25,75 @@ bn_mul_add_words: movl 32(%esp), %ebp pushl %ecx jz .L000maw_finish + leal OPENSSL_ia32cap_P,%eax + btl $26, (%eax) + jnc .L001maw_loop + movd %ebp, %mm0 + pxor %mm1, %mm1 +.L002maw_sse2_loop: + movd (%edi), %mm3 + paddq %mm3, %mm1 + movd (%ebx), %mm2 + pmuludq %mm0, %mm2 + movd 4(%ebx), %mm4 + pmuludq %mm0, %mm4 + movd 8(%ebx), %mm6 + pmuludq %mm0, %mm6 + movd 12(%ebx), %mm7 + pmuludq %mm0, %mm7 + paddq %mm2, %mm1 + movd 4(%edi), %mm3 + paddq %mm4, %mm3 + movd 8(%edi), %mm5 + paddq %mm6, %mm5 + movd 12(%edi), %mm4 + paddq %mm4, %mm7 + movd %mm1, (%edi) + movd 16(%ebx), %mm2 + pmuludq %mm0, %mm2 + psrlq $32, %mm1 + movd 20(%ebx), %mm4 + pmuludq %mm0, %mm4 + paddq %mm3, %mm1 + movd 24(%ebx), %mm6 + pmuludq %mm0, %mm6 + movd %mm1, 4(%edi) + psrlq $32, %mm1 + movd 28(%ebx), %mm3 + addl $32, %ebx + pmuludq %mm0, %mm3 + paddq %mm5, %mm1 + movd 16(%edi), %mm5 + paddq %mm5, %mm2 + movd %mm1, 8(%edi) + psrlq $32, %mm1 + paddq %mm7, %mm1 + movd 20(%edi), %mm5 + paddq %mm5, %mm4 + movd %mm1, 12(%edi) + psrlq $32, %mm1 + paddq %mm2, %mm1 + movd 24(%edi), %mm5 + paddq %mm5, %mm6 + movd %mm1, 16(%edi) + psrlq $32, %mm1 + paddq %mm4, %mm1 + movd 28(%edi), %mm5 + paddq %mm5, %mm3 + movd %mm1, 20(%edi) + psrlq $32, %mm1 + paddq %mm6, %mm1 + movd %mm1, 24(%edi) + psrlq $32, %mm1 + paddq %mm3, %mm1 + movd %mm1, 28(%edi) + addl $32, %edi + psrlq $32, %mm1 + subl $8, %ecx + jnz .L002maw_sse2_loop + movd %mm1, %esi + emms + jmp .L000maw_finish .L001maw_loop: movl %ecx, (%esp) @@ -118,10 +185,10 @@ bn_mul_add_words: .L000maw_finish: movl 32(%esp), %ecx andl $7, %ecx - jnz .L002maw_finish2 - jmp .L003maw_end + jnz .L003maw_finish2 + jmp .L004maw_end .align 16 -.L002maw_finish2: +.L003maw_finish2: movl (%ebx), %eax mull %ebp @@ -133,7 +200,7 @@ bn_mul_add_words: decl %ecx movl %eax, (%edi) movl %edx, %esi - jz .L003maw_end + jz .L004maw_end movl 4(%ebx), %eax mull %ebp @@ -145,7 +212,7 @@ bn_mul_add_words: decl %ecx movl %eax, 4(%edi) movl %edx, %esi - jz .L003maw_end + jz .L004maw_end movl 8(%ebx), %eax mull %ebp @@ -157,7 +224,7 @@ bn_mul_add_words: decl %ecx movl %eax, 8(%edi) movl %edx, %esi - jz .L003maw_end + jz .L004maw_end movl 12(%ebx), %eax mull %ebp @@ -169,7 +236,7 @@ bn_mul_add_words: decl %ecx movl %eax, 12(%edi) movl %edx, %esi - jz .L003maw_end + jz .L004maw_end movl 16(%ebx), %eax mull %ebp @@ -181,7 +248,7 @@ bn_mul_add_words: decl %ecx movl %eax, 16(%edi) movl %edx, %esi - jz .L003maw_end + jz .L004maw_end movl 20(%ebx), %eax mull %ebp @@ -193,7 +260,7 @@ bn_mul_add_words: decl %ecx movl %eax, 20(%edi) movl %edx, %esi - jz .L003maw_end + jz .L004maw_end movl 24(%ebx), %eax mull %ebp @@ -204,7 
+271,7 @@ bn_mul_add_words: adcl $0, %edx movl %eax, 24(%edi) movl %edx, %esi -.L003maw_end: +.L004maw_end: movl %esi, %eax popl %ecx popl %edi @@ -213,12 +280,12 @@ bn_mul_add_words: popl %ebp ret .L_bn_mul_add_words_end: - .size bn_mul_add_words,.L_bn_mul_add_words_end-bn_mul_add_words +.size bn_mul_add_words,.L_bn_mul_add_words_end-bn_mul_add_words .ident "bn_mul_add_words" .text - .align 16 -.globl bn_mul_words - .type bn_mul_words,@function +.globl bn_mul_words +.type bn_mul_words,@function +.align 16 bn_mul_words: pushl %ebp pushl %ebx @@ -232,8 +299,8 @@ bn_mul_words: movl 28(%esp), %ebp movl 32(%esp), %ecx andl $4294967288, %ebp - jz .L004mw_finish -.L005mw_loop: + jz .L005mw_finish +.L006mw_loop: movl (%ebx), %eax mull %ecx @@ -294,15 +361,15 @@ bn_mul_words: addl $32, %ebx addl $32, %edi subl $8, %ebp - jz .L004mw_finish - jmp .L005mw_loop -.L004mw_finish: + jz .L005mw_finish + jmp .L006mw_loop +.L005mw_finish: movl 28(%esp), %ebp andl $7, %ebp - jnz .L006mw_finish2 - jmp .L007mw_end + jnz .L007mw_finish2 + jmp .L008mw_end .align 16 -.L006mw_finish2: +.L007mw_finish2: movl (%ebx), %eax mull %ecx @@ -311,7 +378,7 @@ bn_mul_words: movl %eax, (%edi) movl %edx, %esi decl %ebp - jz .L007mw_end + jz .L008mw_end movl 4(%ebx), %eax mull %ecx @@ -320,7 +387,7 @@ bn_mul_words: movl %eax, 4(%edi) movl %edx, %esi decl %ebp - jz .L007mw_end + jz .L008mw_end movl 8(%ebx), %eax mull %ecx @@ -329,7 +396,7 @@ bn_mul_words: movl %eax, 8(%edi) movl %edx, %esi decl %ebp - jz .L007mw_end + jz .L008mw_end movl 12(%ebx), %eax mull %ecx @@ -338,7 +405,7 @@ bn_mul_words: movl %eax, 12(%edi) movl %edx, %esi decl %ebp - jz .L007mw_end + jz .L008mw_end movl 16(%ebx), %eax mull %ecx @@ -347,7 +414,7 @@ bn_mul_words: movl %eax, 16(%edi) movl %edx, %esi decl %ebp - jz .L007mw_end + jz .L008mw_end movl 20(%ebx), %eax mull %ecx @@ -356,7 +423,7 @@ bn_mul_words: movl %eax, 20(%edi) movl %edx, %esi decl %ebp - jz .L007mw_end + jz .L008mw_end movl 24(%ebx), %eax mull %ecx @@ -364,7 +431,7 @@ bn_mul_words: adcl $0, %edx movl %eax, 24(%edi) movl %edx, %esi -.L007mw_end: +.L008mw_end: movl %esi, %eax popl %edi popl %esi @@ -372,12 +439,12 @@ bn_mul_words: popl %ebp ret .L_bn_mul_words_end: - .size bn_mul_words,.L_bn_mul_words_end-bn_mul_words +.size bn_mul_words,.L_bn_mul_words_end-bn_mul_words .ident "bn_mul_words" .text - .align 16 -.globl bn_sqr_words - .type bn_sqr_words,@function +.globl bn_sqr_words +.type bn_sqr_words,@function +.align 16 bn_sqr_words: pushl %ebp pushl %ebx @@ -389,8 +456,8 @@ bn_sqr_words: movl 24(%esp), %edi movl 28(%esp), %ebx andl $4294967288, %ebx - jz .L008sw_finish -.L009sw_loop: + jz .L009sw_finish +.L010sw_loop: movl (%edi), %eax mull %eax @@ -435,71 +502,71 @@ bn_sqr_words: addl $32, %edi addl $64, %esi subl $8, %ebx - jnz .L009sw_loop -.L008sw_finish: + jnz .L010sw_loop +.L009sw_finish: movl 28(%esp), %ebx andl $7, %ebx - jz .L010sw_end + jz .L011sw_end movl (%edi), %eax mull %eax movl %eax, (%esi) decl %ebx movl %edx, 4(%esi) - jz .L010sw_end + jz .L011sw_end movl 4(%edi), %eax mull %eax movl %eax, 8(%esi) decl %ebx movl %edx, 12(%esi) - jz .L010sw_end + jz .L011sw_end movl 8(%edi), %eax mull %eax movl %eax, 16(%esi) decl %ebx movl %edx, 20(%esi) - jz .L010sw_end + jz .L011sw_end movl 12(%edi), %eax mull %eax movl %eax, 24(%esi) decl %ebx movl %edx, 28(%esi) - jz .L010sw_end + jz .L011sw_end movl 16(%edi), %eax mull %eax movl %eax, 32(%esi) decl %ebx movl %edx, 36(%esi) - jz .L010sw_end + jz .L011sw_end movl 20(%edi), %eax mull %eax movl %eax, 40(%esi) decl %ebx movl %edx, 44(%esi) 
- jz .L010sw_end + jz .L011sw_end movl 24(%edi), %eax mull %eax movl %eax, 48(%esi) movl %edx, 52(%esi) -.L010sw_end: +.L011sw_end: popl %edi popl %esi popl %ebx popl %ebp ret .L_bn_sqr_words_end: - .size bn_sqr_words,.L_bn_sqr_words_end-bn_sqr_words +.size bn_sqr_words,.L_bn_sqr_words_end-bn_sqr_words .ident "bn_sqr_words" .text - .align 16 -.globl bn_div_words - .type bn_div_words,@function +.globl bn_div_words +.type bn_div_words,@function +.align 16 bn_div_words: pushl %ebp pushl %ebx @@ -516,12 +583,12 @@ bn_div_words: popl %ebp ret .L_bn_div_words_end: - .size bn_div_words,.L_bn_div_words_end-bn_div_words +.size bn_div_words,.L_bn_div_words_end-bn_div_words .ident "bn_div_words" .text - .align 16 -.globl bn_add_words - .type bn_add_words,@function +.globl bn_add_words +.type bn_add_words,@function +.align 16 bn_add_words: pushl %ebp pushl %ebx @@ -535,8 +602,8 @@ bn_add_words: movl 32(%esp), %ebp xorl %eax, %eax andl $4294967288, %ebp - jz .L011aw_finish -.L012aw_loop: + jz .L012aw_finish +.L013aw_loop: movl (%esi), %ecx movl (%edi), %edx @@ -614,11 +681,11 @@ bn_add_words: addl $32, %edi addl $32, %ebx subl $8, %ebp - jnz .L012aw_loop -.L011aw_finish: + jnz .L013aw_loop +.L012aw_finish: movl 32(%esp), %ebp andl $7, %ebp - jz .L013aw_end + jz .L014aw_end movl (%esi), %ecx movl (%edi), %edx @@ -629,7 +696,7 @@ bn_add_words: adcl $0, %eax decl %ebp movl %ecx, (%ebx) - jz .L013aw_end + jz .L014aw_end movl 4(%esi), %ecx movl 4(%edi), %edx @@ -640,7 +707,7 @@ bn_add_words: adcl $0, %eax decl %ebp movl %ecx, 4(%ebx) - jz .L013aw_end + jz .L014aw_end movl 8(%esi), %ecx movl 8(%edi), %edx @@ -651,7 +718,7 @@ bn_add_words: adcl $0, %eax decl %ebp movl %ecx, 8(%ebx) - jz .L013aw_end + jz .L014aw_end movl 12(%esi), %ecx movl 12(%edi), %edx @@ -662,7 +729,7 @@ bn_add_words: adcl $0, %eax decl %ebp movl %ecx, 12(%ebx) - jz .L013aw_end + jz .L014aw_end movl 16(%esi), %ecx movl 16(%edi), %edx @@ -673,7 +740,7 @@ bn_add_words: adcl $0, %eax decl %ebp movl %ecx, 16(%ebx) - jz .L013aw_end + jz .L014aw_end movl 20(%esi), %ecx movl 20(%edi), %edx @@ -684,7 +751,7 @@ bn_add_words: adcl $0, %eax decl %ebp movl %ecx, 20(%ebx) - jz .L013aw_end + jz .L014aw_end movl 24(%esi), %ecx movl 24(%edi), %edx @@ -694,19 +761,19 @@ bn_add_words: addl %edx, %ecx adcl $0, %eax movl %ecx, 24(%ebx) -.L013aw_end: +.L014aw_end: popl %edi popl %esi popl %ebx popl %ebp ret .L_bn_add_words_end: - .size bn_add_words,.L_bn_add_words_end-bn_add_words +.size bn_add_words,.L_bn_add_words_end-bn_add_words .ident "bn_add_words" .text - .align 16 -.globl bn_sub_words - .type bn_sub_words,@function +.globl bn_sub_words +.type bn_sub_words,@function +.align 16 bn_sub_words: pushl %ebp pushl %ebx @@ -720,8 +787,8 @@ bn_sub_words: movl 32(%esp), %ebp xorl %eax, %eax andl $4294967288, %ebp - jz .L014aw_finish -.L015aw_loop: + jz .L015aw_finish +.L016aw_loop: movl (%esi), %ecx movl (%edi), %edx @@ -799,11 +866,11 @@ bn_sub_words: addl $32, %edi addl $32, %ebx subl $8, %ebp - jnz .L015aw_loop -.L014aw_finish: + jnz .L016aw_loop +.L015aw_finish: movl 32(%esp), %ebp andl $7, %ebp - jz .L016aw_end + jz .L017aw_end movl (%esi), %ecx movl (%edi), %edx @@ -814,7 +881,7 @@ bn_sub_words: adcl $0, %eax decl %ebp movl %ecx, (%ebx) - jz .L016aw_end + jz .L017aw_end movl 4(%esi), %ecx movl 4(%edi), %edx @@ -825,7 +892,7 @@ bn_sub_words: adcl $0, %eax decl %ebp movl %ecx, 4(%ebx) - jz .L016aw_end + jz .L017aw_end movl 8(%esi), %ecx movl 8(%edi), %edx @@ -836,7 +903,7 @@ bn_sub_words: adcl $0, %eax decl %ebp movl %ecx, 8(%ebx) - jz .L016aw_end + jz 
.L017aw_end movl 12(%esi), %ecx movl 12(%edi), %edx @@ -847,7 +914,7 @@ bn_sub_words: adcl $0, %eax decl %ebp movl %ecx, 12(%ebx) - jz .L016aw_end + jz .L017aw_end movl 16(%esi), %ecx movl 16(%edi), %edx @@ -858,7 +925,7 @@ bn_sub_words: adcl $0, %eax decl %ebp movl %ecx, 16(%ebx) - jz .L016aw_end + jz .L017aw_end movl 20(%esi), %ecx movl 20(%edi), %edx @@ -869,7 +936,7 @@ bn_sub_words: adcl $0, %eax decl %ebp movl %ecx, 20(%ebx) - jz .L016aw_end + jz .L017aw_end movl 24(%esi), %ecx movl 24(%edi), %edx @@ -879,12 +946,564 @@ bn_sub_words: subl %edx, %ecx adcl $0, %eax movl %ecx, 24(%ebx) -.L016aw_end: +.L017aw_end: popl %edi popl %esi popl %ebx popl %ebp ret .L_bn_sub_words_end: - .size bn_sub_words,.L_bn_sub_words_end-bn_sub_words +.size bn_sub_words,.L_bn_sub_words_end-bn_sub_words .ident "bn_sub_words" +.text +.globl bn_sub_part_words +.type bn_sub_part_words,@function +.align 16 +bn_sub_part_words: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + + movl 20(%esp), %ebx + movl 24(%esp), %esi + movl 28(%esp), %edi + movl 32(%esp), %ebp + xorl %eax, %eax + andl $4294967288, %ebp + jz .L018aw_finish +.L019aw_loop: + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + + movl 4(%esi), %ecx + movl 4(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 4(%ebx) + + movl 8(%esi), %ecx + movl 8(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 8(%ebx) + + movl 12(%esi), %ecx + movl 12(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 12(%ebx) + + movl 16(%esi), %ecx + movl 16(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 16(%ebx) + + movl 20(%esi), %ecx + movl 20(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 20(%ebx) + + movl 24(%esi), %ecx + movl 24(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 24(%ebx) + + movl 28(%esi), %ecx + movl 28(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 28(%ebx) + + addl $32, %esi + addl $32, %edi + addl $32, %ebx + subl $8, %ebp + jnz .L019aw_loop +.L018aw_finish: + movl 32(%esp), %ebp + andl $7, %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx + decl %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx + decl %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx + decl %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx + decl %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl 
%ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx + decl %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx + decl %ebp + jz .L020aw_end + + movl (%esi), %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + addl $4, %esi + addl $4, %edi + addl $4, %ebx +.L020aw_end: + cmpl $0, 36(%esp) + je .L021pw_end + movl 36(%esp), %ebp + cmpl $0, %ebp + je .L021pw_end + jge .L022pw_pos + + movl $0, %edx + subl %ebp, %edx + movl %edx, %ebp + andl $4294967288, %ebp + jz .L023pw_neg_finish +.L024pw_neg_loop: + + movl $0, %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, (%ebx) + + movl $0, %ecx + movl 4(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 4(%ebx) + + movl $0, %ecx + movl 8(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 8(%ebx) + + movl $0, %ecx + movl 12(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 12(%ebx) + + movl $0, %ecx + movl 16(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 16(%ebx) + + movl $0, %ecx + movl 20(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 20(%ebx) + + movl $0, %ecx + movl 24(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 24(%ebx) + + movl $0, %ecx + movl 28(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 28(%ebx) + + addl $32, %edi + addl $32, %ebx + subl $8, %ebp + jnz .L024pw_neg_loop +.L023pw_neg_finish: + movl 36(%esp), %edx + movl $0, %ebp + subl %edx, %ebp + andl $7, %ebp + jz .L021pw_end + + movl $0, %ecx + movl (%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + decl %ebp + movl %ecx, (%ebx) + jz .L021pw_end + + movl $0, %ecx + movl 4(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + decl %ebp + movl %ecx, 4(%ebx) + jz .L021pw_end + + movl $0, %ecx + movl 8(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + decl %ebp + movl %ecx, 8(%ebx) + jz .L021pw_end + + movl $0, %ecx + movl 12(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + decl %ebp + movl %ecx, 12(%ebx) + jz .L021pw_end + + movl $0, %ecx + movl 16(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + decl %ebp + movl %ecx, 16(%ebx) + jz .L021pw_end + + movl $0, %ecx + movl 20(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + decl %ebp + movl %ecx, 20(%ebx) + jz .L021pw_end + + movl $0, %ecx + movl 24(%edi), %edx + subl %eax, %ecx + movl $0, %eax + adcl %eax, %eax + subl %edx, %ecx + adcl $0, %eax + movl %ecx, 24(%ebx) + jmp .L021pw_end +.L022pw_pos: + andl $4294967288, %ebp + jz .L025pw_pos_finish +.L026pw_pos_loop: + + movl (%esi), %ecx + subl %eax, %ecx + movl %ecx, (%ebx) + jnc .L027pw_nc0 + + movl 4(%esi), %ecx + subl %eax, %ecx + movl %ecx, 
4(%ebx) + jnc .L028pw_nc1 + + movl 8(%esi), %ecx + subl %eax, %ecx + movl %ecx, 8(%ebx) + jnc .L029pw_nc2 + + movl 12(%esi), %ecx + subl %eax, %ecx + movl %ecx, 12(%ebx) + jnc .L030pw_nc3 + + movl 16(%esi), %ecx + subl %eax, %ecx + movl %ecx, 16(%ebx) + jnc .L031pw_nc4 + + movl 20(%esi), %ecx + subl %eax, %ecx + movl %ecx, 20(%ebx) + jnc .L032pw_nc5 + + movl 24(%esi), %ecx + subl %eax, %ecx + movl %ecx, 24(%ebx) + jnc .L033pw_nc6 + + movl 28(%esi), %ecx + subl %eax, %ecx + movl %ecx, 28(%ebx) + jnc .L034pw_nc7 + + addl $32, %esi + addl $32, %ebx + subl $8, %ebp + jnz .L026pw_pos_loop +.L025pw_pos_finish: + movl 36(%esp), %ebp + andl $7, %ebp + jz .L021pw_end + + movl (%esi), %ecx + subl %eax, %ecx + movl %ecx, (%ebx) + jnc .L035pw_tail_nc0 + decl %ebp + jz .L021pw_end + + movl 4(%esi), %ecx + subl %eax, %ecx + movl %ecx, 4(%ebx) + jnc .L036pw_tail_nc1 + decl %ebp + jz .L021pw_end + + movl 8(%esi), %ecx + subl %eax, %ecx + movl %ecx, 8(%ebx) + jnc .L037pw_tail_nc2 + decl %ebp + jz .L021pw_end + + movl 12(%esi), %ecx + subl %eax, %ecx + movl %ecx, 12(%ebx) + jnc .L038pw_tail_nc3 + decl %ebp + jz .L021pw_end + + movl 16(%esi), %ecx + subl %eax, %ecx + movl %ecx, 16(%ebx) + jnc .L039pw_tail_nc4 + decl %ebp + jz .L021pw_end + + movl 20(%esi), %ecx + subl %eax, %ecx + movl %ecx, 20(%ebx) + jnc .L040pw_tail_nc5 + decl %ebp + jz .L021pw_end + + movl 24(%esi), %ecx + subl %eax, %ecx + movl %ecx, 24(%ebx) + jnc .L041pw_tail_nc6 + movl $1, %eax + jmp .L021pw_end +.L042pw_nc_loop: + movl (%esi), %ecx + movl %ecx, (%ebx) +.L027pw_nc0: + movl 4(%esi), %ecx + movl %ecx, 4(%ebx) +.L028pw_nc1: + movl 8(%esi), %ecx + movl %ecx, 8(%ebx) +.L029pw_nc2: + movl 12(%esi), %ecx + movl %ecx, 12(%ebx) +.L030pw_nc3: + movl 16(%esi), %ecx + movl %ecx, 16(%ebx) +.L031pw_nc4: + movl 20(%esi), %ecx + movl %ecx, 20(%ebx) +.L032pw_nc5: + movl 24(%esi), %ecx + movl %ecx, 24(%ebx) +.L033pw_nc6: + movl 28(%esi), %ecx + movl %ecx, 28(%ebx) +.L034pw_nc7: + + addl $32, %esi + addl $32, %ebx + subl $8, %ebp + jnz .L042pw_nc_loop + movl 36(%esp), %ebp + andl $7, %ebp + jz .L043pw_nc_end + movl (%esi), %ecx + movl %ecx, (%ebx) +.L035pw_tail_nc0: + decl %ebp + jz .L043pw_nc_end + movl 4(%esi), %ecx + movl %ecx, 4(%ebx) +.L036pw_tail_nc1: + decl %ebp + jz .L043pw_nc_end + movl 8(%esi), %ecx + movl %ecx, 8(%ebx) +.L037pw_tail_nc2: + decl %ebp + jz .L043pw_nc_end + movl 12(%esi), %ecx + movl %ecx, 12(%ebx) +.L038pw_tail_nc3: + decl %ebp + jz .L043pw_nc_end + movl 16(%esi), %ecx + movl %ecx, 16(%ebx) +.L039pw_tail_nc4: + decl %ebp + jz .L043pw_nc_end + movl 20(%esi), %ecx + movl %ecx, 20(%ebx) +.L040pw_tail_nc5: + decl %ebp + jz .L043pw_nc_end + movl 24(%esi), %ecx + movl %ecx, 24(%ebx) +.L041pw_tail_nc6: +.L043pw_nc_end: + movl $0, %eax +.L021pw_end: + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_bn_sub_part_words_end: +.size bn_sub_part_words,.L_bn_sub_part_words_end-bn_sub_part_words +.ident "bn_sub_part_words" + +.section .bss +.comm OPENSSL_ia32cap_P,4,4 Index: secure/lib/libcrypto/i386/co-586.s =================================================================== --- secure/lib/libcrypto/i386/co-586.s (revision 222101) +++ secure/lib/libcrypto/i386/co-586.s (working copy) @@ -5,13 +5,11 @@ - .file "/usr/src/secure/lib/libcrypto/../../../crypto/openssl/crypto/bn/asm/co-586.s" - .version "01.01" -gcc2_compiled.: + .file "co-586.s" .text - .align 16 -.globl bn_mul_comba8 - .type bn_mul_comba8,@function +.globl bn_mul_comba8 +.type bn_mul_comba8,@function +.align 16 bn_mul_comba8: pushl %esi movl 12(%esp), %esi @@ -552,12 
+550,12 @@ bn_mul_comba8: popl %esi ret .L_bn_mul_comba8_end: - .size bn_mul_comba8,.L_bn_mul_comba8_end-bn_mul_comba8 -.ident "desasm.pl" +.size bn_mul_comba8,.L_bn_mul_comba8_end-bn_mul_comba8 +.ident "bn_mul_comba8" .text - .align 16 -.globl bn_mul_comba4 - .type bn_mul_comba4,@function +.globl bn_mul_comba4 +.type bn_mul_comba4,@function +.align 16 bn_mul_comba4: pushl %esi movl 12(%esp), %esi @@ -722,12 +720,12 @@ bn_mul_comba4: popl %esi ret .L_bn_mul_comba4_end: - .size bn_mul_comba4,.L_bn_mul_comba4_end-bn_mul_comba4 -.ident "desasm.pl" +.size bn_mul_comba4,.L_bn_mul_comba4_end-bn_mul_comba4 +.ident "bn_mul_comba4" .text - .align 16 -.globl bn_sqr_comba8 - .type bn_sqr_comba8,@function +.globl bn_sqr_comba8 +.type bn_sqr_comba8,@function +.align 16 bn_sqr_comba8: pushl %esi pushl %edi @@ -1132,12 +1130,12 @@ bn_sqr_comba8: popl %esi ret .L_bn_sqr_comba8_end: - .size bn_sqr_comba8,.L_bn_sqr_comba8_end-bn_sqr_comba8 -.ident "desasm.pl" +.size bn_sqr_comba8,.L_bn_sqr_comba8_end-bn_sqr_comba8 +.ident "bn_sqr_comba8" .text - .align 16 -.globl bn_sqr_comba4 - .type bn_sqr_comba4,@function +.globl bn_sqr_comba4 +.type bn_sqr_comba4,@function +.align 16 bn_sqr_comba4: pushl %esi pushl %edi @@ -1266,5 +1264,5 @@ bn_sqr_comba4: popl %esi ret .L_bn_sqr_comba4_end: - .size bn_sqr_comba4,.L_bn_sqr_comba4_end-bn_sqr_comba4 -.ident "desasm.pl" +.size bn_sqr_comba4,.L_bn_sqr_comba4_end-bn_sqr_comba4 +.ident "bn_sqr_comba4" Index: secure/lib/libcrypto/i386/aes-586.s =================================================================== --- secure/lib/libcrypto/i386/aes-586.s (revision 0) +++ secure/lib/libcrypto/i386/aes-586.s (revision 0) @@ -0,0 +1,1707 @@ + # $FreeBSD$ + + + + + + + .file "aes-586.s" +.globl AES_Te +.text +.globl _x86_AES_encrypt +.type _x86_AES_encrypt,@function +.align 16 +_x86_AES_encrypt: + movl %edi, 12(%esp) + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + movl 240(%edi), %esi + leal -2(%esi,%esi), %esi + leal (%edi,%esi,8), %esi + movl %esi, 16(%esp) +.align 4 +.L000loop: + movl %eax, %esi + andl $255, %esi + movl (%ebp,%esi,8), %esi + movzbl %bh, %edi + xorl 3(%ebp,%edi,8), %esi + movl %ecx, %edi + shrl $16, %edi + andl $255, %edi + xorl 2(%ebp,%edi,8), %esi + movl %edx, %edi + shrl $24, %edi + xorl 1(%ebp,%edi,8), %esi + movl %esi, 4(%esp) + + movl %ebx, %esi + andl $255, %esi + shrl $16, %ebx + movl (%ebp,%esi,8), %esi + movzbl %ch, %edi + xorl 3(%ebp,%edi,8), %esi + movl %edx, %edi + shrl $16, %edi + andl $255, %edi + xorl 2(%ebp,%edi,8), %esi + movl %eax, %edi + shrl $24, %edi + xorl 1(%ebp,%edi,8), %esi + movl %esi, 8(%esp) + + movl %ecx, %esi + andl $255, %esi + shrl $24, %ecx + movl (%ebp,%esi,8), %esi + movzbl %dh, %edi + xorl 3(%ebp,%edi,8), %esi + movl %eax, %edi + shrl $16, %edi + andl $255, %edx + andl $255, %edi + xorl 2(%ebp,%edi,8), %esi + movzbl %bh, %edi + xorl 1(%ebp,%edi,8), %esi + + movl 12(%esp), %edi + movl (%ebp,%edx,8), %edx + movzbl %ah, %eax + xorl 3(%ebp,%eax,8), %edx + movl 4(%esp), %eax + andl $255, %ebx + xorl 2(%ebp,%ebx,8), %edx + movl 8(%esp), %ebx + xorl 1(%ebp,%ecx,8), %edx + movl %esi, %ecx + + addl $16, %edi + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + cmpl 16(%esp), %edi + movl %edi, 12(%esp) + jb .L000loop + movl %eax, %esi + andl $255, %esi + movl 2(%ebp,%esi,8), %esi + andl $255, %esi + movzbl %bh, %edi + movl (%ebp,%edi,8), %edi + andl $65280, %edi + xorl %edi, %esi + movl %ecx, %edi + shrl $16, %edi + andl $255, %edi + movl (%ebp,%edi,8), %edi + andl 
$16711680, %edi + xorl %edi, %esi + movl %edx, %edi + shrl $24, %edi + movl 2(%ebp,%edi,8), %edi + andl $4278190080, %edi + xorl %edi, %esi + movl %esi, 4(%esp) + movl %ebx, %esi + andl $255, %esi + shrl $16, %ebx + movl 2(%ebp,%esi,8), %esi + andl $255, %esi + movzbl %ch, %edi + movl (%ebp,%edi,8), %edi + andl $65280, %edi + xorl %edi, %esi + movl %edx, %edi + shrl $16, %edi + andl $255, %edi + movl (%ebp,%edi,8), %edi + andl $16711680, %edi + xorl %edi, %esi + movl %eax, %edi + shrl $24, %edi + movl 2(%ebp,%edi,8), %edi + andl $4278190080, %edi + xorl %edi, %esi + movl %esi, 8(%esp) + movl %ecx, %esi + andl $255, %esi + shrl $24, %ecx + movl 2(%ebp,%esi,8), %esi + andl $255, %esi + movzbl %dh, %edi + movl (%ebp,%edi,8), %edi + andl $65280, %edi + xorl %edi, %esi + movl %eax, %edi + shrl $16, %edi + andl $255, %edx + andl $255, %edi + movl (%ebp,%edi,8), %edi + andl $16711680, %edi + xorl %edi, %esi + movzbl %bh, %edi + movl 2(%ebp,%edi,8), %edi + andl $4278190080, %edi + xorl %edi, %esi + movl 12(%esp), %edi + andl $255, %edx + movl 2(%ebp,%edx,8), %edx + andl $255, %edx + movzbl %ah, %eax + movl (%ebp,%eax,8), %eax + andl $65280, %eax + xorl %eax, %edx + movl 4(%esp), %eax + andl $255, %ebx + movl (%ebp,%ebx,8), %ebx + andl $16711680, %ebx + xorl %ebx, %edx + movl 8(%esp), %ebx + movl 2(%ebp,%ecx,8), %ecx + andl $4278190080, %ecx + xorl %ecx, %edx + movl %esi, %ecx + addl $16, %edi + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + ret +.align 64 +AES_Te: + .long 2774754246,2774754246 + .long 2222750968,2222750968 + .long 2574743534,2574743534 + .long 2373680118,2373680118 + .long 234025727,234025727 + .long 3177933782,3177933782 + .long 2976870366,2976870366 + .long 1422247313,1422247313 + .long 1345335392,1345335392 + .long 50397442,50397442 + .long 2842126286,2842126286 + .long 2099981142,2099981142 + .long 436141799,436141799 + .long 1658312629,1658312629 + .long 3870010189,3870010189 + .long 2591454956,2591454956 + .long 1170918031,1170918031 + .long 2642575903,2642575903 + .long 1086966153,1086966153 + .long 2273148410,2273148410 + .long 368769775,368769775 + .long 3948501426,3948501426 + .long 3376891790,3376891790 + .long 200339707,200339707 + .long 3970805057,3970805057 + .long 1742001331,1742001331 + .long 4255294047,4255294047 + .long 3937382213,3937382213 + .long 3214711843,3214711843 + .long 4154762323,4154762323 + .long 2524082916,2524082916 + .long 1539358875,1539358875 + .long 3266819957,3266819957 + .long 486407649,486407649 + .long 2928907069,2928907069 + .long 1780885068,1780885068 + .long 1513502316,1513502316 + .long 1094664062,1094664062 + .long 49805301,49805301 + .long 1338821763,1338821763 + .long 1546925160,1546925160 + .long 4104496465,4104496465 + .long 887481809,887481809 + .long 150073849,150073849 + .long 2473685474,2473685474 + .long 1943591083,1943591083 + .long 1395732834,1395732834 + .long 1058346282,1058346282 + .long 201589768,201589768 + .long 1388824469,1388824469 + .long 1696801606,1696801606 + .long 1589887901,1589887901 + .long 672667696,672667696 + .long 2711000631,2711000631 + .long 251987210,251987210 + .long 3046808111,3046808111 + .long 151455502,151455502 + .long 907153956,907153956 + .long 2608889883,2608889883 + .long 1038279391,1038279391 + .long 652995533,652995533 + .long 1764173646,1764173646 + .long 3451040383,3451040383 + .long 2675275242,2675275242 + .long 453576978,453576978 + .long 2659418909,2659418909 + .long 1949051992,1949051992 + .long 773462580,773462580 + .long 756751158,756751158 + 
.long 2993581788,2993581788 + .long 3998898868,3998898868 + .long 4221608027,4221608027 + .long 4132590244,4132590244 + .long 1295727478,1295727478 + .long 1641469623,1641469623 + .long 3467883389,3467883389 + .long 2066295122,2066295122 + .long 1055122397,1055122397 + .long 1898917726,1898917726 + .long 2542044179,2542044179 + .long 4115878822,4115878822 + .long 1758581177,1758581177 + .long 0,0 + .long 753790401,753790401 + .long 1612718144,1612718144 + .long 536673507,536673507 + .long 3367088505,3367088505 + .long 3982187446,3982187446 + .long 3194645204,3194645204 + .long 1187761037,1187761037 + .long 3653156455,3653156455 + .long 1262041458,1262041458 + .long 3729410708,3729410708 + .long 3561770136,3561770136 + .long 3898103984,3898103984 + .long 1255133061,1255133061 + .long 1808847035,1808847035 + .long 720367557,720367557 + .long 3853167183,3853167183 + .long 385612781,385612781 + .long 3309519750,3309519750 + .long 3612167578,3612167578 + .long 1429418854,1429418854 + .long 2491778321,2491778321 + .long 3477423498,3477423498 + .long 284817897,284817897 + .long 100794884,100794884 + .long 2172616702,2172616702 + .long 4031795360,4031795360 + .long 1144798328,1144798328 + .long 3131023141,3131023141 + .long 3819481163,3819481163 + .long 4082192802,4082192802 + .long 4272137053,4272137053 + .long 3225436288,3225436288 + .long 2324664069,2324664069 + .long 2912064063,2912064063 + .long 3164445985,3164445985 + .long 1211644016,1211644016 + .long 83228145,83228145 + .long 3753688163,3753688163 + .long 3249976951,3249976951 + .long 1977277103,1977277103 + .long 1663115586,1663115586 + .long 806359072,806359072 + .long 452984805,452984805 + .long 250868733,250868733 + .long 1842533055,1842533055 + .long 1288555905,1288555905 + .long 336333848,336333848 + .long 890442534,890442534 + .long 804056259,804056259 + .long 3781124030,3781124030 + .long 2727843637,2727843637 + .long 3427026056,3427026056 + .long 957814574,957814574 + .long 1472513171,1472513171 + .long 4071073621,4071073621 + .long 2189328124,2189328124 + .long 1195195770,1195195770 + .long 2892260552,2892260552 + .long 3881655738,3881655738 + .long 723065138,723065138 + .long 2507371494,2507371494 + .long 2690670784,2690670784 + .long 2558624025,2558624025 + .long 3511635870,3511635870 + .long 2145180835,2145180835 + .long 1713513028,1713513028 + .long 2116692564,2116692564 + .long 2878378043,2878378043 + .long 2206763019,2206763019 + .long 3393603212,3393603212 + .long 703524551,703524551 + .long 3552098411,3552098411 + .long 1007948840,1007948840 + .long 2044649127,2044649127 + .long 3797835452,3797835452 + .long 487262998,487262998 + .long 1994120109,1994120109 + .long 1004593371,1004593371 + .long 1446130276,1446130276 + .long 1312438900,1312438900 + .long 503974420,503974420 + .long 3679013266,3679013266 + .long 168166924,168166924 + .long 1814307912,1814307912 + .long 3831258296,3831258296 + .long 1573044895,1573044895 + .long 1859376061,1859376061 + .long 4021070915,4021070915 + .long 2791465668,2791465668 + .long 2828112185,2828112185 + .long 2761266481,2761266481 + .long 937747667,937747667 + .long 2339994098,2339994098 + .long 854058965,854058965 + .long 1137232011,1137232011 + .long 1496790894,1496790894 + .long 3077402074,3077402074 + .long 2358086913,2358086913 + .long 1691735473,1691735473 + .long 3528347292,3528347292 + .long 3769215305,3769215305 + .long 3027004632,3027004632 + .long 4199962284,4199962284 + .long 133494003,133494003 + .long 636152527,636152527 + .long 2942657994,2942657994 + .long 
2390391540,2390391540 + .long 3920539207,3920539207 + .long 403179536,403179536 + .long 3585784431,3585784431 + .long 2289596656,2289596656 + .long 1864705354,1864705354 + .long 1915629148,1915629148 + .long 605822008,605822008 + .long 4054230615,4054230615 + .long 3350508659,3350508659 + .long 1371981463,1371981463 + .long 602466507,602466507 + .long 2094914977,2094914977 + .long 2624877800,2624877800 + .long 555687742,555687742 + .long 3712699286,3712699286 + .long 3703422305,3703422305 + .long 2257292045,2257292045 + .long 2240449039,2240449039 + .long 2423288032,2423288032 + .long 1111375484,1111375484 + .long 3300242801,3300242801 + .long 2858837708,2858837708 + .long 3628615824,3628615824 + .long 84083462,84083462 + .long 32962295,32962295 + .long 302911004,302911004 + .long 2741068226,2741068226 + .long 1597322602,1597322602 + .long 4183250862,4183250862 + .long 3501832553,3501832553 + .long 2441512471,2441512471 + .long 1489093017,1489093017 + .long 656219450,656219450 + .long 3114180135,3114180135 + .long 954327513,954327513 + .long 335083755,335083755 + .long 3013122091,3013122091 + .long 856756514,856756514 + .long 3144247762,3144247762 + .long 1893325225,1893325225 + .long 2307821063,2307821063 + .long 2811532339,2811532339 + .long 3063651117,3063651117 + .long 572399164,572399164 + .long 2458355477,2458355477 + .long 552200649,552200649 + .long 1238290055,1238290055 + .long 4283782570,4283782570 + .long 2015897680,2015897680 + .long 2061492133,2061492133 + .long 2408352771,2408352771 + .long 4171342169,4171342169 + .long 2156497161,2156497161 + .long 386731290,386731290 + .long 3669999461,3669999461 + .long 837215959,837215959 + .long 3326231172,3326231172 + .long 3093850320,3093850320 + .long 3275833730,3275833730 + .long 2962856233,2962856233 + .long 1999449434,1999449434 + .long 286199582,286199582 + .long 3417354363,3417354363 + .long 4233385128,4233385128 + .long 3602627437,3602627437 + .long 974525996,974525996 + .long 1,2,4,8 + .long 16,32,64,128 + .long 27,54,0,0,0,0,0,0 +.L__x86_AES_encrypt_end: +.size _x86_AES_encrypt,.L__x86_AES_encrypt_end-_x86_AES_encrypt +.ident "_x86_AES_encrypt" +.globl AES_Te +.text +.globl AES_encrypt +.type AES_encrypt,@function +.align 16 +AES_encrypt: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + movl 20(%esp), %esi + movl 28(%esp), %edi + movl %esp, %eax + subl $24, %esp + andl $-64, %esp + addl $4, %esp + movl %eax, 16(%esp) + call .L001pic_point +.L001pic_point: + popl %ebp + leal AES_Te-.L001pic_point(%ebp),%ebp + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + call _x86_AES_encrypt + movl 16(%esp), %esp + movl 24(%esp), %esi + movl %eax, (%esi) + movl %ebx, 4(%esi) + movl %ecx, 8(%esi) + movl %edx, 12(%esi) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_AES_encrypt_end: +.size AES_encrypt,.L_AES_encrypt_end-AES_encrypt +.ident "AES_encrypt" +.globl AES_Td +.text +.globl _x86_AES_decrypt +.type _x86_AES_decrypt,@function +.align 16 +_x86_AES_decrypt: + movl %edi, 12(%esp) + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + movl 240(%edi), %esi + leal -2(%esi,%esi), %esi + leal (%edi,%esi,8), %esi + movl %esi, 16(%esp) +.align 4 +.L002loop: + movl %eax, %esi + andl $255, %esi + movl (%ebp,%esi,8), %esi + movzbl %dh, %edi + xorl 3(%ebp,%edi,8), %esi + movl %ecx, %edi + shrl $16, %edi + andl $255, %edi + xorl 2(%ebp,%edi,8), %esi + movl %ebx, %edi + shrl $24, %edi + xorl 1(%ebp,%edi,8), %esi + movl %esi, 4(%esp) + + movl %ebx, %esi + andl $255, %esi 
+ movl (%ebp,%esi,8), %esi + movzbl %ah, %edi + xorl 3(%ebp,%edi,8), %esi + movl %edx, %edi + shrl $16, %edi + andl $255, %edi + xorl 2(%ebp,%edi,8), %esi + movl %ecx, %edi + shrl $24, %edi + xorl 1(%ebp,%edi,8), %esi + movl %esi, 8(%esp) + + movl %ecx, %esi + andl $255, %esi + movl (%ebp,%esi,8), %esi + movzbl %bh, %edi + xorl 3(%ebp,%edi,8), %esi + movl %eax, %edi + shrl $16, %edi + andl $255, %edi + xorl 2(%ebp,%edi,8), %esi + movl %edx, %edi + shrl $24, %edi + xorl 1(%ebp,%edi,8), %esi + + movl 12(%esp), %edi + andl $255, %edx + movl (%ebp,%edx,8), %edx + movzbl %ch, %ecx + xorl 3(%ebp,%ecx,8), %edx + movl %esi, %ecx + shrl $16, %ebx + andl $255, %ebx + xorl 2(%ebp,%ebx,8), %edx + movl 8(%esp), %ebx + shrl $24, %eax + xorl 1(%ebp,%eax,8), %edx + movl 4(%esp), %eax + + addl $16, %edi + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + cmpl 16(%esp), %edi + movl %edi, 12(%esp) + jb .L002loop + movl %eax, %esi + andl $255, %esi + movzbl 2048(%ebp,%esi,1),%esi + movzbl %dh, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $8, %edi + xorl %edi, %esi + movl %ecx, %edi + shrl $16, %edi + andl $255, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $16, %edi + xorl %edi, %esi + movl %ebx, %edi + shrl $24, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $24, %edi + xorl %edi, %esi + movl %esi, 4(%esp) + movl %ebx, %esi + andl $255, %esi + movzbl 2048(%ebp,%esi,1),%esi + movzbl %ah, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $8, %edi + xorl %edi, %esi + movl %edx, %edi + shrl $16, %edi + andl $255, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $16, %edi + xorl %edi, %esi + movl %ecx, %edi + shrl $24, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $24, %edi + xorl %edi, %esi + movl %esi, 8(%esp) + movl %ecx, %esi + andl $255, %esi + movzbl 2048(%ebp,%esi,1),%esi + movzbl %bh, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $8, %edi + xorl %edi, %esi + movl %eax, %edi + shrl $16, %edi + andl $255, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $16, %edi + xorl %edi, %esi + movl %edx, %edi + shrl $24, %edi + movzbl 2048(%ebp,%edi,1),%edi + sall $24, %edi + xorl %edi, %esi + movl 12(%esp), %edi + andl $255, %edx + movzbl 2048(%ebp,%edx,1),%edx + movzbl %ch, %ecx + movzbl 2048(%ebp,%ecx,1),%ecx + sall $8, %ecx + xorl %ecx, %edx + movl %esi, %ecx + shrl $16, %ebx + andl $255, %ebx + movzbl 2048(%ebp,%ebx,1),%ebx + sall $16, %ebx + xorl %ebx, %edx + movl 8(%esp), %ebx + shrl $24, %eax + movzbl 2048(%ebp,%eax,1),%eax + sall $24, %eax + xorl %eax, %edx + movl 4(%esp), %eax + addl $16, %edi + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + ret +.align 64 +AES_Td: + .long 1353184337,1353184337 + .long 1399144830,1399144830 + .long 3282310938,3282310938 + .long 2522752826,2522752826 + .long 3412831035,3412831035 + .long 4047871263,4047871263 + .long 2874735276,2874735276 + .long 2466505547,2466505547 + .long 1442459680,1442459680 + .long 4134368941,4134368941 + .long 2440481928,2440481928 + .long 625738485,625738485 + .long 4242007375,4242007375 + .long 3620416197,3620416197 + .long 2151953702,2151953702 + .long 2409849525,2409849525 + .long 1230680542,1230680542 + .long 1729870373,1729870373 + .long 2551114309,2551114309 + .long 3787521629,3787521629 + .long 41234371,41234371 + .long 317738113,317738113 + .long 2744600205,2744600205 + .long 3338261355,3338261355 + .long 3881799427,3881799427 + .long 2510066197,2510066197 + .long 3950669247,3950669247 + .long 3663286933,3663286933 + .long 763608788,763608788 + .long 3542185048,3542185048 + .long 
694804553,694804553 + .long 1154009486,1154009486 + .long 1787413109,1787413109 + .long 2021232372,2021232372 + .long 1799248025,1799248025 + .long 3715217703,3715217703 + .long 3058688446,3058688446 + .long 397248752,397248752 + .long 1722556617,1722556617 + .long 3023752829,3023752829 + .long 407560035,407560035 + .long 2184256229,2184256229 + .long 1613975959,1613975959 + .long 1165972322,1165972322 + .long 3765920945,3765920945 + .long 2226023355,2226023355 + .long 480281086,480281086 + .long 2485848313,2485848313 + .long 1483229296,1483229296 + .long 436028815,436028815 + .long 2272059028,2272059028 + .long 3086515026,3086515026 + .long 601060267,601060267 + .long 3791801202,3791801202 + .long 1468997603,1468997603 + .long 715871590,715871590 + .long 120122290,120122290 + .long 63092015,63092015 + .long 2591802758,2591802758 + .long 2768779219,2768779219 + .long 4068943920,4068943920 + .long 2997206819,2997206819 + .long 3127509762,3127509762 + .long 1552029421,1552029421 + .long 723308426,723308426 + .long 2461301159,2461301159 + .long 4042393587,4042393587 + .long 2715969870,2715969870 + .long 3455375973,3455375973 + .long 3586000134,3586000134 + .long 526529745,526529745 + .long 2331944644,2331944644 + .long 2639474228,2639474228 + .long 2689987490,2689987490 + .long 853641733,853641733 + .long 1978398372,1978398372 + .long 971801355,971801355 + .long 2867814464,2867814464 + .long 111112542,111112542 + .long 1360031421,1360031421 + .long 4186579262,4186579262 + .long 1023860118,1023860118 + .long 2919579357,2919579357 + .long 1186850381,1186850381 + .long 3045938321,3045938321 + .long 90031217,90031217 + .long 1876166148,1876166148 + .long 4279586912,4279586912 + .long 620468249,620468249 + .long 2548678102,2548678102 + .long 3426959497,3426959497 + .long 2006899047,2006899047 + .long 3175278768,3175278768 + .long 2290845959,2290845959 + .long 945494503,945494503 + .long 3689859193,3689859193 + .long 1191869601,1191869601 + .long 3910091388,3910091388 + .long 3374220536,3374220536 + .long 0,0 + .long 2206629897,2206629897 + .long 1223502642,1223502642 + .long 2893025566,2893025566 + .long 1316117100,1316117100 + .long 4227796733,4227796733 + .long 1446544655,1446544655 + .long 517320253,517320253 + .long 658058550,658058550 + .long 1691946762,1691946762 + .long 564550760,564550760 + .long 3511966619,3511966619 + .long 976107044,976107044 + .long 2976320012,2976320012 + .long 266819475,266819475 + .long 3533106868,3533106868 + .long 2660342555,2660342555 + .long 1338359936,1338359936 + .long 2720062561,2720062561 + .long 1766553434,1766553434 + .long 370807324,370807324 + .long 179999714,179999714 + .long 3844776128,3844776128 + .long 1138762300,1138762300 + .long 488053522,488053522 + .long 185403662,185403662 + .long 2915535858,2915535858 + .long 3114841645,3114841645 + .long 3366526484,3366526484 + .long 2233069911,2233069911 + .long 1275557295,1275557295 + .long 3151862254,3151862254 + .long 4250959779,4250959779 + .long 2670068215,2670068215 + .long 3170202204,3170202204 + .long 3309004356,3309004356 + .long 880737115,880737115 + .long 1982415755,1982415755 + .long 3703972811,3703972811 + .long 1761406390,1761406390 + .long 1676797112,1676797112 + .long 3403428311,3403428311 + .long 277177154,277177154 + .long 1076008723,1076008723 + .long 538035844,538035844 + .long 2099530373,2099530373 + .long 4164795346,4164795346 + .long 288553390,288553390 + .long 1839278535,1839278535 + .long 1261411869,1261411869 + .long 4080055004,4080055004 + .long 3964831245,3964831245 + .long 
3504587127,3504587127 + .long 1813426987,1813426987 + .long 2579067049,2579067049 + .long 4199060497,4199060497 + .long 577038663,577038663 + .long 3297574056,3297574056 + .long 440397984,440397984 + .long 3626794326,3626794326 + .long 4019204898,4019204898 + .long 3343796615,3343796615 + .long 3251714265,3251714265 + .long 4272081548,4272081548 + .long 906744984,906744984 + .long 3481400742,3481400742 + .long 685669029,685669029 + .long 646887386,646887386 + .long 2764025151,2764025151 + .long 3835509292,3835509292 + .long 227702864,227702864 + .long 2613862250,2613862250 + .long 1648787028,1648787028 + .long 3256061430,3256061430 + .long 3904428176,3904428176 + .long 1593260334,1593260334 + .long 4121936770,4121936770 + .long 3196083615,3196083615 + .long 2090061929,2090061929 + .long 2838353263,2838353263 + .long 3004310991,3004310991 + .long 999926984,999926984 + .long 2809993232,2809993232 + .long 1852021992,1852021992 + .long 2075868123,2075868123 + .long 158869197,158869197 + .long 4095236462,4095236462 + .long 28809964,28809964 + .long 2828685187,2828685187 + .long 1701746150,1701746150 + .long 2129067946,2129067946 + .long 147831841,147831841 + .long 3873969647,3873969647 + .long 3650873274,3650873274 + .long 3459673930,3459673930 + .long 3557400554,3557400554 + .long 3598495785,3598495785 + .long 2947720241,2947720241 + .long 824393514,824393514 + .long 815048134,815048134 + .long 3227951669,3227951669 + .long 935087732,935087732 + .long 2798289660,2798289660 + .long 2966458592,2966458592 + .long 366520115,366520115 + .long 1251476721,1251476721 + .long 4158319681,4158319681 + .long 240176511,240176511 + .long 804688151,804688151 + .long 2379631990,2379631990 + .long 1303441219,1303441219 + .long 1414376140,1414376140 + .long 3741619940,3741619940 + .long 3820343710,3820343710 + .long 461924940,461924940 + .long 3089050817,3089050817 + .long 2136040774,2136040774 + .long 82468509,82468509 + .long 1563790337,1563790337 + .long 1937016826,1937016826 + .long 776014843,776014843 + .long 1511876531,1511876531 + .long 1389550482,1389550482 + .long 861278441,861278441 + .long 323475053,323475053 + .long 2355222426,2355222426 + .long 2047648055,2047648055 + .long 2383738969,2383738969 + .long 2302415851,2302415851 + .long 3995576782,3995576782 + .long 902390199,902390199 + .long 3991215329,3991215329 + .long 1018251130,1018251130 + .long 1507840668,1507840668 + .long 1064563285,1064563285 + .long 2043548696,2043548696 + .long 3208103795,3208103795 + .long 3939366739,3939366739 + .long 1537932639,1537932639 + .long 342834655,342834655 + .long 2262516856,2262516856 + .long 2180231114,2180231114 + .long 1053059257,1053059257 + .long 741614648,741614648 + .long 1598071746,1598071746 + .long 1925389590,1925389590 + .long 203809468,203809468 + .long 2336832552,2336832552 + .long 1100287487,1100287487 + .long 1895934009,1895934009 + .long 3736275976,3736275976 + .long 2632234200,2632234200 + .long 2428589668,2428589668 + .long 1636092795,1636092795 + .long 1890988757,1890988757 + .long 1952214088,1952214088 + .long 1113045200,1113045200 + .byte 82,9,106,213,48,54,165,56 + .byte 191,64,163,158,129,243,215,251 + .byte 124,227,57,130,155,47,255,135 + .byte 52,142,67,68,196,222,233,203 + .byte 84,123,148,50,166,194,35,61 + .byte 238,76,149,11,66,250,195,78 + .byte 8,46,161,102,40,217,36,178 + .byte 118,91,162,73,109,139,209,37 + .byte 114,248,246,100,134,104,152,22 + .byte 212,164,92,204,93,101,182,146 + .byte 108,112,72,80,253,237,185,218 + .byte 94,21,70,87,167,141,157,132 + .byte 
144,216,171,0,140,188,211,10 + .byte 247,228,88,5,184,179,69,6 + .byte 208,44,30,143,202,63,15,2 + .byte 193,175,189,3,1,19,138,107 + .byte 58,145,17,65,79,103,220,234 + .byte 151,242,207,206,240,180,230,115 + .byte 150,172,116,34,231,173,53,133 + .byte 226,249,55,232,28,117,223,110 + .byte 71,241,26,113,29,41,197,137 + .byte 111,183,98,14,170,24,190,27 + .byte 252,86,62,75,198,210,121,32 + .byte 154,219,192,254,120,205,90,244 + .byte 31,221,168,51,136,7,199,49 + .byte 177,18,16,89,39,128,236,95 + .byte 96,81,127,169,25,181,74,13 + .byte 45,229,122,159,147,201,156,239 + .byte 160,224,59,77,174,42,245,176 + .byte 200,235,187,60,131,83,153,97 + .byte 23,43,4,126,186,119,214,38 + .byte 225,105,20,99,85,33,12,125 +.L__x86_AES_decrypt_end: +.size _x86_AES_decrypt,.L__x86_AES_decrypt_end-_x86_AES_decrypt +.ident "_x86_AES_decrypt" +.globl AES_Td +.text +.globl AES_decrypt +.type AES_decrypt,@function +.align 16 +AES_decrypt: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + movl 20(%esp), %esi + movl 28(%esp), %edi + movl %esp, %eax + subl $24, %esp + andl $-64, %esp + addl $4, %esp + movl %eax, 16(%esp) + call .L003pic_point +.L003pic_point: + popl %ebp + leal AES_Td-.L003pic_point(%ebp),%ebp + leal 2176(%ebp), %ebp + movl -128(%ebp), %eax + movl -96(%ebp), %ebx + movl -64(%ebp), %ecx + movl -32(%ebp), %edx + movl (%ebp), %eax + movl 32(%ebp), %ebx + movl 64(%ebp), %ecx + movl 96(%ebp), %edx + leal -2176(%ebp), %ebp + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + call _x86_AES_decrypt + movl 16(%esp), %esp + movl 24(%esp), %esi + movl %eax, (%esi) + movl %ebx, 4(%esi) + movl %ecx, 8(%esi) + movl %edx, 12(%esi) + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_AES_decrypt_end: +.size AES_decrypt,.L_AES_decrypt_end-AES_decrypt +.ident "AES_decrypt" +.globl AES_Te +.globl AES_Td +.text +.globl AES_cbc_encrypt +.type AES_cbc_encrypt,@function +.align 16 +AES_cbc_encrypt: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + movl 28(%esp), %ecx + cmpl $0, %ecx + je .L004enc_out + call .L005pic_point +.L005pic_point: + popl %ebp + pushfl + cld + cmpl $0, 44(%esp) + je .L006DECRYPT + leal AES_Te-.L005pic_point(%ebp),%ebp + leal -308(%esp), %edi + andl $-64, %edi + movl %ebp, %eax + leal 2048(%ebp), %ebx + movl %edi, %edx + andl $4095, %eax + andl $4095, %ebx + andl $4095, %edx + cmpl %ebx, %edx + jb .L007te_break_out + subl %ebx, %edx + subl %edx, %edi + jmp .L008te_ok +.L007te_break_out: + subl %eax, %edx + andl $4095, %edx + addl $320, %edx + subl %edx, %edi +.align 4 +.L008te_ok: + movl 24(%esp), %eax + movl 28(%esp), %ebx + movl 36(%esp), %edx + movl 40(%esp), %esi + xchgl %edi, %esp + addl $4, %esp + movl %edi, 16(%esp) + movl %eax, 20(%esp) + movl %ebx, 24(%esp) + movl %ecx, 28(%esp) + movl %edx, 32(%esp) + movl %esi, 36(%esp) + movl $0, 300(%esp) + movl %edx, %ebx + movl $61, %ecx + subl %ebp, %ebx + movl %edx, %esi + andl $4095, %ebx + leal 60(%esp), %edi + cmpl $2048, %ebx + jb .L009do_ecopy + cmpl $3852, %ebx + jb .L010skip_ecopy +.align 4 +.L009do_ecopy: + movl %edi, 32(%esp) + .long 2784229001 +.L010skip_ecopy: + movl %eax, %esi + movl $16, %edi +.align 4 +.L011prefetch_te: + movl (%ebp), %eax + movl 32(%ebp), %ebx + movl 64(%ebp), %ecx + movl 96(%ebp), %edx + leal 128(%ebp), %ebp + decl %edi + jnz .L011prefetch_te + subl $2048, %ebp + movl 28(%esp), %ecx + movl 36(%esp), %edi + testl $4294967280, %ecx + jz .L012enc_tail + movl (%edi), %eax + movl 4(%edi), %ebx +.align 4 +.L013enc_loop: + movl 8(%edi), %ecx + movl 12(%edi), %edx + xorl 
(%esi), %eax + xorl 4(%esi), %ebx + xorl 8(%esi), %ecx + xorl 12(%esi), %edx + movl 32(%esp), %edi + call _x86_AES_encrypt + movl 20(%esp), %esi + movl 24(%esp), %edi + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + movl 28(%esp), %ecx + leal 16(%esi), %esi + movl %esi, 20(%esp) + leal 16(%edi), %edx + movl %edx, 24(%esp) + subl $16, %ecx + testl $4294967280, %ecx + movl %ecx, 28(%esp) + jnz .L013enc_loop + testl $15, %ecx + jnz .L012enc_tail + movl 36(%esp), %esi + movl 8(%edi), %ecx + movl 12(%edi), %edx + movl %eax, (%esi) + movl %ebx, 4(%esi) + movl %ecx, 8(%esi) + movl %edx, 12(%esi) + cmpl $0, 300(%esp) + movl 32(%esp), %edi + je .L014skip_ezero + movl $60, %ecx + xorl %eax, %eax +.align 4 + .long 2884892297 +.L014skip_ezero: + movl 16(%esp), %esp + popfl +.L004enc_out: + popl %edi + popl %esi + popl %ebx + popl %ebp + ret + pushfl +.align 4 +.L012enc_tail: + movl %edi, %eax + movl 24(%esp), %edi + pushl %eax + movl $16, %ebx + subl %ecx, %ebx + cmpl %esi, %edi + je .L015enc_in_place +.align 4 + .long 2767451785 + jmp .L016enc_skip_in_place +.L015enc_in_place: + leal (%edi,%ecx), %edi +.L016enc_skip_in_place: + movl %ebx, %ecx + xorl %eax, %eax +.align 4 + .long 2868115081 + popl %edi + movl 24(%esp), %esi + movl (%edi), %eax + movl 4(%edi), %ebx + movl $16, 28(%esp) + jmp .L013enc_loop +.align 4 +.L006DECRYPT: + leal AES_Td-.L005pic_point(%ebp),%ebp + leal -308(%esp), %edi + andl $-64, %edi + movl %ebp, %eax + leal 2304(%ebp), %ebx + movl %edi, %edx + andl $4095, %eax + andl $4095, %ebx + andl $4095, %edx + cmpl %ebx, %edx + jb .L017td_break_out + subl %ebx, %edx + subl %edx, %edi + jmp .L018td_ok +.L017td_break_out: + subl %eax, %edx + andl $4095, %edx + addl $320, %edx + subl %edx, %edi +.align 4 +.L018td_ok: + movl 24(%esp), %eax + movl 28(%esp), %ebx + movl 36(%esp), %edx + movl 40(%esp), %esi + xchgl %edi, %esp + addl $4, %esp + movl %edi, 16(%esp) + movl %eax, 20(%esp) + movl %ebx, 24(%esp) + movl %ecx, 28(%esp) + movl %edx, 32(%esp) + movl %esi, 36(%esp) + movl $0, 300(%esp) + movl %edx, %ebx + movl $61, %ecx + subl %ebp, %ebx + movl %edx, %esi + andl $4095, %ebx + leal 60(%esp), %edi + cmpl $2304, %ebx + jb .L019do_dcopy + cmpl $3852, %ebx + jb .L020skip_dcopy +.align 4 +.L019do_dcopy: + movl %edi, 32(%esp) + .long 2784229001 +.L020skip_dcopy: + movl %eax, %esi + movl $18, %edi +.align 4 +.L021prefetch_td: + movl (%ebp), %eax + movl 32(%ebp), %ebx + movl 64(%ebp), %ecx + movl 96(%ebp), %edx + leal 128(%ebp), %ebp + decl %edi + jnz .L021prefetch_td + subl $2304, %ebp + cmpl 24(%esp), %esi + je .L022dec_in_place + movl 36(%esp), %edi + movl %edi, 40(%esp) +.align 4 +.L023dec_loop: + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + movl 32(%esp), %edi + call _x86_AES_decrypt + movl 40(%esp), %edi + movl 28(%esp), %esi + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + subl $16, %esi + jc .L024dec_partial + movl %esi, 28(%esp) + movl 20(%esp), %esi + movl 24(%esp), %edi + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + movl %esi, 40(%esp) + leal 16(%esi), %esi + movl %esi, 20(%esp) + leal 16(%edi), %edi + movl %edi, 24(%esp) + jnz .L023dec_loop + movl 40(%esp), %edi +.L025dec_end: + movl 36(%esp), %esi + movl (%edi), %eax + movl 4(%edi), %ebx + movl 8(%edi), %ecx + movl 12(%edi), %edx + movl %eax, (%esi) + movl %ebx, 4(%esi) + movl %ecx, 8(%esi) + movl %edx, 12(%esi) + jmp .L026dec_out +.align 4 +.L024dec_partial: + leal 44(%esp), %edi + 
movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + leal 16(%esi), %ecx + movl %edi, %esi + movl 24(%esp), %edi + .long 2767451785 + movl 20(%esp), %edi + jmp .L025dec_end +.align 4 +.L022dec_in_place: +.L027dec_in_place_loop: + leal 44(%esp), %edi + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + movl 32(%esp), %edi + call _x86_AES_decrypt + movl 36(%esp), %edi + movl 24(%esp), %esi + xorl (%edi), %eax + xorl 4(%edi), %ebx + xorl 8(%edi), %ecx + xorl 12(%edi), %edx + movl %eax, (%esi) + movl %ebx, 4(%esi) + movl %ecx, 8(%esi) + movl %edx, 12(%esi) + leal 16(%esi), %esi + movl %esi, 24(%esp) + leal 44(%esp), %esi + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + movl 20(%esp), %esi + leal 16(%esi), %esi + movl %esi, 20(%esp) + movl 28(%esp), %ecx + subl $16, %ecx + jc .L028dec_in_place_partial + movl %ecx, 28(%esp) + jnz .L027dec_in_place_loop + jmp .L026dec_out +.align 4 +.L028dec_in_place_partial: + movl 24(%esp), %edi + leal 44(%esp), %esi + leal (%edi,%ecx), %edi + leal 16(%esi,%ecx), %esi + negl %ecx + .long 2767451785 +.align 4 +.L026dec_out: + cmpl $0, 300(%esp) + movl 32(%esp), %edi + je .L029skip_dzero + movl $60, %ecx + xorl %eax, %eax +.align 4 + .long 2884892297 +.L029skip_dzero: + movl 16(%esp), %esp + popfl + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_AES_cbc_encrypt_end: +.size AES_cbc_encrypt,.L_AES_cbc_encrypt_end-AES_cbc_encrypt +.ident "AES_cbc_encrypt" +.globl AES_Te +.text +.globl AES_set_encrypt_key +.type AES_set_encrypt_key,@function +.align 16 +AES_set_encrypt_key: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + movl 20(%esp), %esi + movl 28(%esp), %edi + testl $-1, %esi + jz .L030badpointer + testl $-1, %edi + jz .L030badpointer + call .L031pic_point +.L031pic_point: + popl %ebp + leal AES_Te-.L031pic_point(%ebp),%ebp + movl 24(%esp), %ecx + cmpl $128, %ecx + je .L03210rounds + cmpl $192, %ecx + je .L03312rounds + cmpl $256, %ecx + je .L03414rounds + movl $-2, %eax + jmp .L035exit +.L03210rounds: + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + xorl %ecx, %ecx + jmp .L03610shortcut +.align 4 +.L03710loop: + movl (%edi), %eax + movl 12(%edi), %edx +.L03610shortcut: + movzbl %dl, %esi + movl 2(%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $4278190080, %ebx + xorl %ebx, %eax + movl 2(%ebp,%esi,8), %ebx + shrl $16, %edx + andl $255, %ebx + movzbl %dl, %esi + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $65280, %ebx + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + andl $16711680, %ebx + xorl %ebx, %eax + xorl 2048(%ebp,%ecx,4),%eax + movl %eax, 16(%edi) + xorl 4(%edi), %eax + movl %eax, 20(%edi) + xorl 8(%edi), %eax + movl %eax, 24(%edi) + xorl 12(%edi), %eax + movl %eax, 28(%edi) + incl %ecx + addl $16, %edi + cmpl $10, %ecx + jl .L03710loop + movl $10, 80(%edi) + xorl %eax, %eax + jmp .L035exit +.L03312rounds: + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + movl 16(%esi), %ecx + movl 20(%esi), %edx + movl %ecx, 16(%edi) + movl %edx, 20(%edi) + xorl %ecx, %ecx + jmp .L03812shortcut +.align 4 +.L03912loop: + movl (%edi), 
%eax + movl 20(%edi), %edx +.L03812shortcut: + movzbl %dl, %esi + movl 2(%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $4278190080, %ebx + xorl %ebx, %eax + movl 2(%ebp,%esi,8), %ebx + shrl $16, %edx + andl $255, %ebx + movzbl %dl, %esi + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $65280, %ebx + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + andl $16711680, %ebx + xorl %ebx, %eax + xorl 2048(%ebp,%ecx,4),%eax + movl %eax, 24(%edi) + xorl 4(%edi), %eax + movl %eax, 28(%edi) + xorl 8(%edi), %eax + movl %eax, 32(%edi) + xorl 12(%edi), %eax + movl %eax, 36(%edi) + cmpl $7, %ecx + je .L04012break + incl %ecx + xorl 16(%edi), %eax + movl %eax, 40(%edi) + xorl 20(%edi), %eax + movl %eax, 44(%edi) + addl $24, %edi + jmp .L03912loop +.L04012break: + movl $12, 72(%edi) + xorl %eax, %eax + jmp .L035exit +.L03414rounds: + movl (%esi), %eax + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, 8(%edi) + movl %edx, 12(%edi) + movl 16(%esi), %eax + movl 20(%esi), %ebx + movl 24(%esi), %ecx + movl 28(%esi), %edx + movl %eax, 16(%edi) + movl %ebx, 20(%edi) + movl %ecx, 24(%edi) + movl %edx, 28(%edi) + xorl %ecx, %ecx + jmp .L04114shortcut +.align 4 +.L04214loop: + movl 28(%edi), %edx +.L04114shortcut: + movl (%edi), %eax + movzbl %dl, %esi + movl 2(%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $4278190080, %ebx + xorl %ebx, %eax + movl 2(%ebp,%esi,8), %ebx + shrl $16, %edx + andl $255, %ebx + movzbl %dl, %esi + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $65280, %ebx + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + andl $16711680, %ebx + xorl %ebx, %eax + xorl 2048(%ebp,%ecx,4),%eax + movl %eax, 32(%edi) + xorl 4(%edi), %eax + movl %eax, 36(%edi) + xorl 8(%edi), %eax + movl %eax, 40(%edi) + xorl 12(%edi), %eax + movl %eax, 44(%edi) + cmpl $6, %ecx + je .L04314break + incl %ecx + movl %eax, %edx + movl 16(%edi), %eax + movzbl %dl, %esi + movl 2(%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $255, %ebx + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + shrl $16, %edx + andl $65280, %ebx + movzbl %dl, %esi + xorl %ebx, %eax + movl (%ebp,%esi,8), %ebx + movzbl %dh, %esi + andl $16711680, %ebx + xorl %ebx, %eax + movl 2(%ebp,%esi,8), %ebx + andl $4278190080, %ebx + xorl %ebx, %eax + movl %eax, 48(%edi) + xorl 20(%edi), %eax + movl %eax, 52(%edi) + xorl 24(%edi), %eax + movl %eax, 56(%edi) + xorl 28(%edi), %eax + movl %eax, 60(%edi) + addl $32, %edi + jmp .L04214loop +.L04314break: + movl $14, 48(%edi) + xorl %eax, %eax + jmp .L035exit +.L030badpointer: + movl $-1, %eax +.L035exit: + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_AES_set_encrypt_key_end: +.size AES_set_encrypt_key,.L_AES_set_encrypt_key_end-AES_set_encrypt_key +.ident "AES_set_encrypt_key" +.globl AES_Td +.globl AES_Te +.text +.globl AES_set_decrypt_key +.type AES_set_decrypt_key,@function +.align 16 +AES_set_decrypt_key: + movl 4(%esp), %eax + movl 8(%esp), %ecx + movl 12(%esp), %edx + subl $12, %esp + movl %eax, (%esp) + movl %ecx, 4(%esp) + movl %edx, 8(%esp) + call AES_set_encrypt_key + addl $12, %esp + cmpl $0, %eax + je .L044proceed + ret +.L044proceed: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 28(%esp), %esi + movl 240(%esi), %ecx + leal (,%ecx,4), %ecx + leal (%esi,%ecx,4), %edi +.align 4 +.L045invert: + movl (%esi), %eax + movl 4(%esi), %ebx + movl (%edi), %ecx + movl 4(%edi), %edx + movl %eax, (%edi) + movl %ebx, 4(%edi) + movl %ecx, (%esi) + movl %edx, 4(%esi) + movl 8(%esi), %eax + movl 12(%esi), %ebx + 
movl 8(%edi), %ecx + movl 12(%edi), %edx + movl %eax, 8(%edi) + movl %ebx, 12(%edi) + movl %ecx, 8(%esi) + movl %edx, 12(%esi) + addl $16, %esi + subl $16, %edi + cmpl %edi, %esi + jne .L045invert + call .L046pic_point +.L046pic_point: + popl %ebp + leal AES_Td-.L046pic_point(%ebp),%edi + leal AES_Te-.L046pic_point(%ebp),%ebp + movl 28(%esp), %esi + movl 240(%esi), %ecx + decl %ecx +.align 4 +.L047permute: + addl $16, %esi + movl (%esi), %eax + movl %eax, %edx + movzbl %ah, %ebx + shrl $16, %edx + andl $255, %eax + movzbl 2(%ebp,%eax,8), %eax + movzbl 2(%ebp,%ebx,8), %ebx + movl (%edi,%eax,8), %eax + xorl 3(%edi,%ebx,8), %eax + movzbl %dh, %ebx + andl $255, %edx + movzbl 2(%ebp,%edx,8), %edx + movzbl 2(%ebp,%ebx,8), %ebx + xorl 2(%edi,%edx,8), %eax + xorl 1(%edi,%ebx,8), %eax + movl %eax, (%esi) + movl 4(%esi), %eax + movl %eax, %edx + movzbl %ah, %ebx + shrl $16, %edx + andl $255, %eax + movzbl 2(%ebp,%eax,8), %eax + movzbl 2(%ebp,%ebx,8), %ebx + movl (%edi,%eax,8), %eax + xorl 3(%edi,%ebx,8), %eax + movzbl %dh, %ebx + andl $255, %edx + movzbl 2(%ebp,%edx,8), %edx + movzbl 2(%ebp,%ebx,8), %ebx + xorl 2(%edi,%edx,8), %eax + xorl 1(%edi,%ebx,8), %eax + movl %eax, 4(%esi) + movl 8(%esi), %eax + movl %eax, %edx + movzbl %ah, %ebx + shrl $16, %edx + andl $255, %eax + movzbl 2(%ebp,%eax,8), %eax + movzbl 2(%ebp,%ebx,8), %ebx + movl (%edi,%eax,8), %eax + xorl 3(%edi,%ebx,8), %eax + movzbl %dh, %ebx + andl $255, %edx + movzbl 2(%ebp,%edx,8), %edx + movzbl 2(%ebp,%ebx,8), %ebx + xorl 2(%edi,%edx,8), %eax + xorl 1(%edi,%ebx,8), %eax + movl %eax, 8(%esi) + movl 12(%esi), %eax + movl %eax, %edx + movzbl %ah, %ebx + shrl $16, %edx + andl $255, %eax + movzbl 2(%ebp,%eax,8), %eax + movzbl 2(%ebp,%ebx,8), %ebx + movl (%edi,%eax,8), %eax + xorl 3(%edi,%ebx,8), %eax + movzbl %dh, %ebx + andl $255, %edx + movzbl 2(%ebp,%edx,8), %edx + movzbl 2(%ebp,%ebx,8), %ebx + xorl 2(%edi,%edx,8), %eax + xorl 1(%edi,%ebx,8), %eax + movl %eax, 12(%esi) + decl %ecx + jnz .L047permute + xorl %eax, %eax + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_AES_set_decrypt_key_end: +.size AES_set_decrypt_key,.L_AES_set_decrypt_key_end-AES_set_decrypt_key +.ident "AES_set_decrypt_key" Property changes on: secure/lib/libcrypto/i386/aes-586.s ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/i386/des-586.s =================================================================== --- secure/lib/libcrypto/i386/des-586.s (revision 222101) +++ secure/lib/libcrypto/i386/des-586.s (working copy) @@ -6,12 +6,10 @@ .file "des-586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl DES_encrypt1 - .type DES_encrypt1,@function +.globl DES_encrypt1 +.type DES_encrypt1,@function +.align 16 DES_encrypt1: pushl %esi pushl %edi @@ -1078,12 +1076,12 @@ DES_encrypt1: popl %esi ret .L_DES_encrypt1_end: - .size DES_encrypt1,.L_DES_encrypt1_end-DES_encrypt1 -.ident "desasm.pl" +.size DES_encrypt1,.L_DES_encrypt1_end-DES_encrypt1 +.ident "DES_encrypt1" .text - .align 16 -.globl DES_encrypt2 - .type DES_encrypt2,@function +.globl DES_encrypt2 +.type DES_encrypt2,@function +.align 16 DES_encrypt2: pushl %esi pushl %edi @@ -2078,12 +2076,12 @@ DES_encrypt2: popl %esi ret .L_DES_encrypt2_end: - .size DES_encrypt2,.L_DES_encrypt2_end-DES_encrypt2 -.ident "desasm.pl" +.size DES_encrypt2,.L_DES_encrypt2_end-DES_encrypt2 +.ident "DES_encrypt2" .text - .align 16 -.globl DES_encrypt3 - .type 
DES_encrypt3,@function +.globl DES_encrypt3 +.type DES_encrypt3,@function +.align 16 DES_encrypt3: pushl %ebx movl 8(%esp), %ebx @@ -2201,12 +2199,12 @@ DES_encrypt3: popl %ebx ret .L_DES_encrypt3_end: - .size DES_encrypt3,.L_DES_encrypt3_end-DES_encrypt3 -.ident "desasm.pl" +.size DES_encrypt3,.L_DES_encrypt3_end-DES_encrypt3 +.ident "DES_encrypt3" .text - .align 16 -.globl DES_decrypt3 - .type DES_decrypt3,@function +.globl DES_decrypt3 +.type DES_decrypt3,@function +.align 16 DES_decrypt3: pushl %ebx movl 8(%esp), %ebx @@ -2324,12 +2322,12 @@ DES_decrypt3: popl %ebx ret .L_DES_decrypt3_end: - .size DES_decrypt3,.L_DES_decrypt3_end-DES_decrypt3 -.ident "desasm.pl" +.size DES_decrypt3,.L_DES_decrypt3_end-DES_decrypt3 +.ident "DES_decrypt3" .text - .align 16 -.globl DES_ncbc_encrypt - .type DES_ncbc_encrypt,@function +.globl DES_ncbc_encrypt +.type DES_ncbc_encrypt,@function +.align 16 DES_ncbc_encrypt: pushl %ebp @@ -2496,23 +2494,24 @@ DES_ncbc_encrypt: popl %ebx popl %ebp ret -.align 16 +.align 64 .L009cbc_enc_jmp_table: - .long 0 - .long .L017ej1-.L008PIC_point - .long .L016ej2-.L008PIC_point - .long .L015ej3-.L008PIC_point - .long .L013ej4-.L008PIC_point - .long .L012ej5-.L008PIC_point - .long .L011ej6-.L008PIC_point - .long .L010ej7-.L008PIC_point + .long 0 + .long .L017ej1-.L008PIC_point + .long .L016ej2-.L008PIC_point + .long .L015ej3-.L008PIC_point + .long .L013ej4-.L008PIC_point + .long .L012ej5-.L008PIC_point + .long .L011ej6-.L008PIC_point + .long .L010ej7-.L008PIC_point +.align 64 .L_DES_ncbc_encrypt_end: - .size DES_ncbc_encrypt,.L_DES_ncbc_encrypt_end-DES_ncbc_encrypt -.ident "desasm.pl" +.size DES_ncbc_encrypt,.L_DES_ncbc_encrypt_end-DES_ncbc_encrypt +.ident "DES_ncbc_encrypt" .text - .align 16 -.globl DES_ede3_cbc_encrypt - .type DES_ede3_cbc_encrypt,@function +.globl DES_ede3_cbc_encrypt +.type DES_ede3_cbc_encrypt,@function +.align 16 DES_ede3_cbc_encrypt: pushl %ebp @@ -2683,16 +2682,17 @@ DES_ede3_cbc_encrypt: popl %ebx popl %ebp ret -.align 16 +.align 64 .L033cbc_enc_jmp_table: - .long 0 - .long .L041ej1-.L032PIC_point - .long .L040ej2-.L032PIC_point - .long .L039ej3-.L032PIC_point - .long .L037ej4-.L032PIC_point - .long .L036ej5-.L032PIC_point - .long .L035ej6-.L032PIC_point - .long .L034ej7-.L032PIC_point + .long 0 + .long .L041ej1-.L032PIC_point + .long .L040ej2-.L032PIC_point + .long .L039ej3-.L032PIC_point + .long .L037ej4-.L032PIC_point + .long .L036ej5-.L032PIC_point + .long .L035ej6-.L032PIC_point + .long .L034ej7-.L032PIC_point +.align 64 .L_DES_ede3_cbc_encrypt_end: - .size DES_ede3_cbc_encrypt,.L_DES_ede3_cbc_encrypt_end-DES_ede3_cbc_encrypt -.ident "desasm.pl" +.size DES_ede3_cbc_encrypt,.L_DES_ede3_cbc_encrypt_end-DES_ede3_cbc_encrypt +.ident "DES_ede3_cbc_encrypt" Index: secure/lib/libcrypto/i386/rmd-586.s =================================================================== --- secure/lib/libcrypto/i386/rmd-586.s (revision 222101) +++ secure/lib/libcrypto/i386/rmd-586.s (working copy) @@ -5,14 +5,12 @@ - .file "/usr/src/secure/lib/libcrypto/../../../crypto/openssl/crypto/ripemd/asm/rmd-586.s" - .version "01.01" -gcc2_compiled.: + .file "rmd-586.s" .text - .align 16 -.globl ripemd160_block_asm_host_order - .type ripemd160_block_asm_host_order,@function -ripemd160_block_asm_host_order: +.globl ripemd160_block_asm_data_order +.type ripemd160_block_asm_data_order,@function +.align 16 +ripemd160_block_asm_data_order: movl 4(%esp), %edx movl 8(%esp), %eax pushl %esi @@ -1970,6 +1968,6 @@ popl %edi popl %esi ret -.L_ripemd160_block_asm_host_order_end: - .size 
ripemd160_block_asm_host_order,.L_ripemd160_block_asm_host_order_end-ripemd160_block_asm_host_order -.ident "desasm.pl" +.L_ripemd160_block_asm_data_order_end: +.size ripemd160_block_asm_data_order,.L_ripemd160_block_asm_data_order_end-ripemd160_block_asm_data_order +.ident "ripemd160_block_asm_data_order" Index: secure/lib/libcrypto/i386/crypt586.s =================================================================== --- secure/lib/libcrypto/i386/crypt586.s (revision 222101) +++ secure/lib/libcrypto/i386/crypt586.s (working copy) @@ -6,12 +6,10 @@ .file "crypt586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl fcrypt_body - .type fcrypt_body,@function +.globl fcrypt_body +.type fcrypt_body,@function +.align 16 fcrypt_body: pushl %ebp pushl %ebx @@ -883,5 +881,5 @@ fcrypt_body: popl %ebp ret .L_fcrypt_body_end: - .size fcrypt_body,.L_fcrypt_body_end-fcrypt_body +.size fcrypt_body,.L_fcrypt_body_end-fcrypt_body .ident "fcrypt_body" Index: secure/lib/libcrypto/i386/md5-586.s =================================================================== --- secure/lib/libcrypto/i386/md5-586.s (revision 222101) +++ secure/lib/libcrypto/i386/md5-586.s (working copy) @@ -5,14 +5,12 @@ - .file "/usr/src/secure/lib/libcrypto/../../../crypto/openssl/crypto/md5/asm/md5-586.s" - .version "01.01" -gcc2_compiled.: + .file "md5-586.s" .text - .align 16 -.globl md5_block_asm_host_order - .type md5_block_asm_host_order,@function -md5_block_asm_host_order: +.globl md5_block_asm_data_order +.type md5_block_asm_data_order,@function +.align 16 +md5_block_asm_data_order: pushl %esi pushl %edi movl 12(%esp), %edi @@ -684,6 +682,6 @@ popl %edi popl %esi ret -.L_md5_block_asm_host_order_end: - .size md5_block_asm_host_order,.L_md5_block_asm_host_order_end-md5_block_asm_host_order -.ident "desasm.pl" +.L_md5_block_asm_data_order_end: +.size md5_block_asm_data_order,.L_md5_block_asm_data_order_end-md5_block_asm_data_order +.ident "md5_block_asm_data_order" Index: secure/lib/libcrypto/i386/bf-586.s =================================================================== --- secure/lib/libcrypto/i386/bf-586.s (revision 222101) +++ secure/lib/libcrypto/i386/bf-586.s (working copy) @@ -6,12 +6,10 @@ .file "bf-586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl BF_encrypt - .type BF_encrypt,@function +.globl BF_encrypt +.type BF_encrypt,@function +.align 16 BF_encrypt: pushl %ebp @@ -358,12 +356,12 @@ BF_encrypt: popl %ebp ret .L_BF_encrypt_end: - .size BF_encrypt,.L_BF_encrypt_end-BF_encrypt +.size BF_encrypt,.L_BF_encrypt_end-BF_encrypt .ident "BF_encrypt" .text - .align 16 -.globl BF_decrypt - .type BF_decrypt,@function +.globl BF_decrypt +.type BF_decrypt,@function +.align 16 BF_decrypt: pushl %ebp @@ -710,12 +708,12 @@ BF_decrypt: popl %ebp ret .L_BF_decrypt_end: - .size BF_decrypt,.L_BF_decrypt_end-BF_decrypt +.size BF_decrypt,.L_BF_decrypt_end-BF_decrypt .ident "BF_decrypt" .text - .align 16 -.globl BF_cbc_encrypt - .type BF_cbc_encrypt,@function +.globl BF_cbc_encrypt +.type BF_cbc_encrypt,@function +.align 16 BF_cbc_encrypt: pushl %ebp @@ -912,16 +910,17 @@ BF_cbc_encrypt: popl %ebx popl %ebp ret -.align 16 +.align 64 .L005cbc_enc_jmp_table: - .long 0 - .long .L013ej1-.L004PIC_point - .long .L012ej2-.L004PIC_point - .long .L011ej3-.L004PIC_point - .long .L009ej4-.L004PIC_point - .long .L008ej5-.L004PIC_point - .long .L007ej6-.L004PIC_point - .long .L006ej7-.L004PIC_point + .long 0 + .long .L013ej1-.L004PIC_point + .long .L012ej2-.L004PIC_point + .long .L011ej3-.L004PIC_point + .long 
.L009ej4-.L004PIC_point + .long .L008ej5-.L004PIC_point + .long .L007ej6-.L004PIC_point + .long .L006ej7-.L004PIC_point +.align 64 .L_BF_cbc_encrypt_end: - .size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt -.ident "desasm.pl" +.size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt +.ident "BF_cbc_encrypt" Index: secure/lib/libcrypto/i386/bf-686.s =================================================================== --- secure/lib/libcrypto/i386/bf-686.s (revision 222101) +++ secure/lib/libcrypto/i386/bf-686.s (working copy) @@ -6,12 +6,10 @@ .file "bf-686.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl BF_encrypt - .type BF_encrypt,@function +.globl BF_encrypt +.type BF_encrypt,@function +.align 16 BF_encrypt: pushl %ebp pushl %ebx @@ -343,12 +341,12 @@ BF_encrypt: popl %ebp ret .L_BF_encrypt_end: - .size BF_encrypt,.L_BF_encrypt_end-BF_encrypt -.ident "desasm.pl" +.size BF_encrypt,.L_BF_encrypt_end-BF_encrypt +.ident "BF_encrypt" .text - .align 16 -.globl BF_decrypt - .type BF_decrypt,@function +.globl BF_decrypt +.type BF_decrypt,@function +.align 16 BF_decrypt: pushl %ebp pushl %ebx @@ -680,12 +678,12 @@ BF_decrypt: popl %ebp ret .L_BF_decrypt_end: - .size BF_decrypt,.L_BF_decrypt_end-BF_decrypt -.ident "desasm.pl" +.size BF_decrypt,.L_BF_decrypt_end-BF_decrypt +.ident "BF_decrypt" .text - .align 16 -.globl BF_cbc_encrypt - .type BF_cbc_encrypt,@function +.globl BF_cbc_encrypt +.type BF_cbc_encrypt,@function +.align 16 BF_cbc_encrypt: pushl %ebp @@ -882,16 +880,17 @@ BF_cbc_encrypt: popl %ebx popl %ebp ret -.align 16 +.align 64 .L005cbc_enc_jmp_table: - .long 0 - .long .L013ej1-.L004PIC_point - .long .L012ej2-.L004PIC_point - .long .L011ej3-.L004PIC_point - .long .L009ej4-.L004PIC_point - .long .L008ej5-.L004PIC_point - .long .L007ej6-.L004PIC_point - .long .L006ej7-.L004PIC_point + .long 0 + .long .L013ej1-.L004PIC_point + .long .L012ej2-.L004PIC_point + .long .L011ej3-.L004PIC_point + .long .L009ej4-.L004PIC_point + .long .L008ej5-.L004PIC_point + .long .L007ej6-.L004PIC_point + .long .L006ej7-.L004PIC_point +.align 64 .L_BF_cbc_encrypt_end: - .size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt -.ident "desasm.pl" +.size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt +.ident "BF_cbc_encrypt" Index: secure/lib/libcrypto/i386/rc4-586.s =================================================================== --- secure/lib/libcrypto/i386/rc4-586.s (revision 222101) +++ secure/lib/libcrypto/i386/rc4-586.s (working copy) @@ -6,311 +6,292 @@ .file "rc4-586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl RC4 - .type RC4,@function +.globl RC4 +.type RC4,@function +.align 16 RC4: + movl 8(%esp), %edx + cmpl $0, %edx + jne .L000proceed + ret +.L000proceed: pushl %ebp pushl %ebx - movl 12(%esp), %ebp - movl 16(%esp), %ebx pushl %esi + xorl %eax, %eax pushl %edi - movl (%ebp), %ecx - movl 4(%ebp), %edx + xorl %ebx, %ebx + movl 20(%esp), %ebp movl 28(%esp), %esi - incl %ecx + movb (%ebp), %al + movb 4(%ebp), %bl + movl 32(%esp), %edi + incb %al subl $12, %esp addl $8, %ebp - andl $255, %ecx - leal -8(%ebx,%esi), %ebx - movl 44(%esp), %edi - movl %ebx, 8(%esp) - movl (%ebp,%ecx,4), %eax - cmpl %esi, %ebx - jl .L000end -.L001start: + cmpl $-1, 256(%ebp) + je .L001RC4_CHAR + leal -8(%edx,%esi), %edx + movl %edx, 8(%esp) + movl (%ebp,%eax,4), %ecx + cmpl %esi, %edx + jb .L002end +.L003start: addl $8, %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - 
incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, (%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, (%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, 1(%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, 1(%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, 2(%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, 2(%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, 3(%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, 3(%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, 4(%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, 4(%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, 5(%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, 5(%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb %bl, 6(%esp) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb %dl, 6(%esp) - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx + incb %al + movl (%ebp,%edx,4), %edx addl $8, %edi - movb %bl, 7(%esp) + movb %dl, 7(%esp) - movl (%esp), %eax - movl -8(%esi), %ebx - 
xorl %ebx, %eax - movl -4(%esi), %ebx - movl %eax, -8(%edi) - movl 4(%esp), %eax - xorl %ebx, %eax - movl 8(%esp), %ebx - movl %eax, -4(%edi) - movl (%ebp,%ecx,4), %eax - cmpl %ebx, %esi - jle .L001start -.L000end: + movl (%esp), %ecx + movl -8(%esi), %edx + xorl %edx, %ecx + movl -4(%esi), %edx + movl %ecx, -8(%edi) + movl 4(%esp), %ecx + xorl %edx, %ecx + movl 8(%esp), %edx + movl %ecx, -4(%edi) + movl (%ebp,%eax,4), %ecx + cmpl %edx, %esi + jbe .L003start +.L002end: - addl $8, %ebx + addl $8, %edx incl %esi - cmpl %esi, %ebx - jl .L002finished - movl %ebx, 8(%esp) - addl %eax, %edx + cmpl %esi, %edx + jb .L004finished + movl %edx, 8(%esp) + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, (%edi) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, (%edi) - movl 8(%esp), %ebx - cmpl %esi, %ebx - jle .L002finished + movl 8(%esp), %edx + cmpl %esi, %edx + jbe .L004finished incl %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, 1(%edi) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, 1(%edi) - movl 8(%esp), %ebx - cmpl %esi, %ebx - jle .L002finished + movl 8(%esp), %edx + cmpl %esi, %edx + jbe .L004finished incl %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, 2(%edi) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, 2(%edi) - movl 8(%esp), %ebx - cmpl %esi, %ebx - jle .L002finished + movl 8(%esp), %edx + cmpl %esi, %edx + jbe .L004finished incl %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, 3(%edi) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, 3(%edi) - movl 8(%esp), %ebx - cmpl %esi, %ebx - jle .L002finished + movl 8(%esp), %edx + cmpl %esi, %edx + jbe .L004finished incl %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl 
$255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, 4(%edi) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, 4(%edi) - movl 8(%esp), %ebx - cmpl %esi, %ebx - jle .L002finished + movl 8(%esp), %edx + cmpl %esi, %edx + jbe .L004finished incl %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movl (%ebp,%ecx,4), %eax - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, 5(%edi) + incb %al + movl (%ebp,%eax,4), %ecx + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, 5(%edi) - movl 8(%esp), %ebx - cmpl %esi, %ebx - jle .L002finished + movl 8(%esp), %edx + cmpl %esi, %edx + jbe .L004finished incl %esi - addl %eax, %edx + addb %cl, %bl + movl (%ebp,%ebx,4), %edx + movl %edx, (%ebp,%eax,4) + addl %ecx, %edx + movl %ecx, (%ebp,%ebx,4) andl $255, %edx - incl %ecx - movl (%ebp,%edx,4), %ebx - movl %ebx, -4(%ebp,%ecx,4) - addl %eax, %ebx - andl $255, %ecx - andl $255, %ebx - movl %eax, (%ebp,%edx,4) - nop - movl (%ebp,%ebx,4), %ebx - movb -1(%esi), %bh - xorb %bh, %bl - movb %bl, 6(%edi) -.L002finished: - decl %ecx + incb %al + movl (%ebp,%edx,4), %edx + movb -1(%esi), %dh + xorb %dh, %dl + movb %dl, 6(%edi) + jmp .L004finished +.align 16 +.L001RC4_CHAR: + leal (%esi,%edx), %edx + movl %edx, 8(%esp) + movzbl (%ebp,%eax), %ecx +.L005RC4_CHAR_loop: + addb %cl, %bl + movzbl (%ebp,%ebx), %edx + movb %cl, (%ebp,%ebx) + movb %dl, (%ebp,%eax) + addb %cl, %dl + movzbl (%ebp,%edx), %edx + addb $1, %al + xorb (%esi), %dl + leal 1(%esi), %esi + movzbl (%ebp,%eax), %ecx + cmpl 8(%esp), %esi + movb %dl, (%edi) + leal 1(%edi), %edi + jb .L005RC4_CHAR_loop +.L004finished: + decl %eax addl $12, %esp - movl %edx, -4(%ebp) - movb %cl, -8(%ebp) + movb %bl, -4(%ebp) + movb %al, -8(%ebp) popl %edi popl %esi popl %ebx popl %ebp ret .L_RC4_end: - .size RC4,.L_RC4_end-RC4 +.size RC4,.L_RC4_end-RC4 .ident "RC4" Index: secure/lib/libcrypto/i386/rc5-586.s =================================================================== --- secure/lib/libcrypto/i386/rc5-586.s (revision 222101) +++ secure/lib/libcrypto/i386/rc5-586.s (working copy) @@ -6,12 +6,10 @@ .file "rc5-586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl RC5_32_encrypt - .type RC5_32_encrypt,@function +.globl RC5_32_encrypt +.type RC5_32_encrypt,@function +.align 16 RC5_32_encrypt: pushl %ebp @@ -199,12 +197,12 @@ RC5_32_encrypt: popl %ebp ret .L_RC5_32_encrypt_end: - .size RC5_32_encrypt,.L_RC5_32_encrypt_end-RC5_32_encrypt -.ident "desasm.pl" +.size RC5_32_encrypt,.L_RC5_32_encrypt_end-RC5_32_encrypt +.ident "RC5_32_encrypt" .text - .align 16 -.globl RC5_32_decrypt - .type RC5_32_decrypt,@function +.globl RC5_32_decrypt +.type RC5_32_decrypt,@function +.align 16 RC5_32_decrypt: pushl %ebp @@ -394,12 +392,12 @@ RC5_32_decrypt: popl %ebp ret .L_RC5_32_decrypt_end: - .size RC5_32_decrypt,.L_RC5_32_decrypt_end-RC5_32_decrypt -.ident "desasm.pl" +.size RC5_32_decrypt,.L_RC5_32_decrypt_end-RC5_32_decrypt +.ident "RC5_32_decrypt" .text - .align 16 -.globl RC5_32_cbc_encrypt - .type RC5_32_cbc_encrypt,@function +.globl RC5_32_cbc_encrypt +.type RC5_32_cbc_encrypt,@function +.align 16 
RC5_32_cbc_encrypt: pushl %ebp @@ -564,16 +562,17 @@ RC5_32_cbc_encrypt: popl %ebx popl %ebp ret -.align 16 +.align 64 .L009cbc_enc_jmp_table: - .long 0 - .long .L017ej1-.L008PIC_point - .long .L016ej2-.L008PIC_point - .long .L015ej3-.L008PIC_point - .long .L013ej4-.L008PIC_point - .long .L012ej5-.L008PIC_point - .long .L011ej6-.L008PIC_point - .long .L010ej7-.L008PIC_point + .long 0 + .long .L017ej1-.L008PIC_point + .long .L016ej2-.L008PIC_point + .long .L015ej3-.L008PIC_point + .long .L013ej4-.L008PIC_point + .long .L012ej5-.L008PIC_point + .long .L011ej6-.L008PIC_point + .long .L010ej7-.L008PIC_point +.align 64 .L_RC5_32_cbc_encrypt_end: - .size RC5_32_cbc_encrypt,.L_RC5_32_cbc_encrypt_end-RC5_32_cbc_encrypt -.ident "desasm.pl" +.size RC5_32_cbc_encrypt,.L_RC5_32_cbc_encrypt_end-RC5_32_cbc_encrypt +.ident "RC5_32_cbc_encrypt" Index: secure/lib/libcrypto/i386/cast-586.s =================================================================== --- secure/lib/libcrypto/i386/cast-586.s (revision 222101) +++ secure/lib/libcrypto/i386/cast-586.s (working copy) @@ -6,12 +6,10 @@ .file "cast-586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl CAST_encrypt - .type CAST_encrypt,@function +.globl CAST_encrypt +.type CAST_encrypt,@function +.align 16 CAST_encrypt: pushl %ebp @@ -378,12 +376,12 @@ CAST_encrypt: popl %ebp ret .L_CAST_encrypt_end: - .size CAST_encrypt,.L_CAST_encrypt_end-CAST_encrypt +.size CAST_encrypt,.L_CAST_encrypt_end-CAST_encrypt .ident "CAST_encrypt" .text - .align 16 -.globl CAST_decrypt - .type CAST_decrypt,@function +.globl CAST_decrypt +.type CAST_decrypt,@function +.align 16 CAST_decrypt: pushl %ebp @@ -747,12 +745,12 @@ CAST_decrypt: popl %ebp ret .L_CAST_decrypt_end: - .size CAST_decrypt,.L_CAST_decrypt_end-CAST_decrypt +.size CAST_decrypt,.L_CAST_decrypt_end-CAST_decrypt .ident "CAST_decrypt" .text - .align 16 -.globl CAST_cbc_encrypt - .type CAST_cbc_encrypt,@function +.globl CAST_cbc_encrypt +.type CAST_cbc_encrypt,@function +.align 16 CAST_cbc_encrypt: pushl %ebp @@ -951,16 +949,17 @@ CAST_cbc_encrypt: popl %ebx popl %ebp ret -.align 16 +.align 64 .L007cbc_enc_jmp_table: - .long 0 - .long .L015ej1-.L006PIC_point - .long .L014ej2-.L006PIC_point - .long .L013ej3-.L006PIC_point - .long .L011ej4-.L006PIC_point - .long .L010ej5-.L006PIC_point - .long .L009ej6-.L006PIC_point - .long .L008ej7-.L006PIC_point + .long 0 + .long .L015ej1-.L006PIC_point + .long .L014ej2-.L006PIC_point + .long .L013ej3-.L006PIC_point + .long .L011ej4-.L006PIC_point + .long .L010ej5-.L006PIC_point + .long .L009ej6-.L006PIC_point + .long .L008ej7-.L006PIC_point +.align 64 .L_CAST_cbc_encrypt_end: - .size CAST_cbc_encrypt,.L_CAST_cbc_encrypt_end-CAST_cbc_encrypt -.ident "desasm.pl" +.size CAST_cbc_encrypt,.L_CAST_cbc_encrypt_end-CAST_cbc_encrypt +.ident "CAST_cbc_encrypt" Index: secure/lib/libcrypto/i386/x86cpuid.s =================================================================== --- secure/lib/libcrypto/i386/x86cpuid.s (revision 0) +++ secure/lib/libcrypto/i386/x86cpuid.s (revision 0) @@ -0,0 +1,232 @@ + # $FreeBSD$ + + + + + + + .file "x86cpuid.s" +.text +.globl OPENSSL_ia32_cpuid +.type OPENSSL_ia32_cpuid,@function +.align 16 +OPENSSL_ia32_cpuid: + pushl %ebp + pushl %ebx + pushl %esi + pushl %edi + + xorl %edx, %edx + pushfl + popl %eax + movl %eax, %ecx + xorl $2097152, %eax + pushl %eax + popfl + pushfl + popl %eax + xorl %eax, %ecx + btl $21, %ecx + jnc .L000done + xorl %eax, %eax + .byte 0x0f,0xa2 + xorl %eax, %eax + cmpl $1970169159, %ebx + .byte 15,149,192 + movl %eax, %ebp + 
cmpl $1231384169, %edx + .byte 15,149,192 + orl %eax, %ebp + cmpl $1818588270, %ecx + .byte 15,149,192 + orl %eax, %ebp + movl $1, %eax + .byte 0x0f,0xa2 + cmpl $0, %ebp + jne .L001notP4 + andl $3840, %eax + cmpl $3840, %eax + jne .L001notP4 + orl $1048576, %edx +.L001notP4: + btl $28, %edx + jnc .L000done + shrl $16, %ebx + andl $255, %ebx + cmpl $1, %ebx + ja .L000done + andl $4026531839, %edx +.L000done: + movl %edx, %eax + movl %ecx, %edx + popl %edi + popl %esi + popl %ebx + popl %ebp + ret +.L_OPENSSL_ia32_cpuid_end: +.size OPENSSL_ia32_cpuid,.L_OPENSSL_ia32_cpuid_end-OPENSSL_ia32_cpuid +.ident "OPENSSL_ia32_cpuid" +.text +.globl OPENSSL_rdtsc +.type OPENSSL_rdtsc,@function +.align 16 +OPENSSL_rdtsc: + xorl %eax, %eax + xorl %edx, %edx + leal OPENSSL_ia32cap_P,%ecx + btl $4, (%ecx) + jnc .L002notsc + .byte 0x0f,0x31 +.L002notsc: + ret +.L_OPENSSL_rdtsc_end: +.size OPENSSL_rdtsc,.L_OPENSSL_rdtsc_end-OPENSSL_rdtsc +.ident "OPENSSL_rdtsc" +.text +.globl OPENSSL_instrument_halt +.type OPENSSL_instrument_halt,@function +.align 16 +OPENSSL_instrument_halt: + leal OPENSSL_ia32cap_P,%ecx + btl $4, (%ecx) + jnc .L003nohalt + .long 2421723150 + andl $3, %eax + jnz .L003nohalt + pushfl + popl %eax + btl $9, %eax + jnc .L003nohalt + .byte 0x0f,0x31 + pushl %edx + pushl %eax + hlt + .byte 0x0f,0x31 + subl (%esp), %eax + sbbl 4(%esp), %edx + addl $8, %esp + ret +.L003nohalt: + xorl %eax, %eax + xorl %edx, %edx + ret +.L_OPENSSL_instrument_halt_end: +.size OPENSSL_instrument_halt,.L_OPENSSL_instrument_halt_end-OPENSSL_instrument_halt +.ident "OPENSSL_instrument_halt" +.text +.globl OPENSSL_far_spin +.type OPENSSL_far_spin,@function +.align 16 +OPENSSL_far_spin: + pushfl + popl %eax + btl $9, %eax + jnc .L004nospin + movl 4(%esp), %eax + movl 8(%esp), %ecx + .long 2430111262 + xorl %eax, %eax + movl (%ecx), %edx + jmp .L005spin +.align 16 +.L005spin: + incl %eax + cmpl (%ecx), %edx + je .L005spin + .long 529567888 + ret +.L004nospin: + xorl %eax, %eax + xorl %edx, %edx + ret +.L_OPENSSL_far_spin_end: +.size OPENSSL_far_spin,.L_OPENSSL_far_spin_end-OPENSSL_far_spin +.ident "OPENSSL_far_spin" +.text +.globl OPENSSL_wipe_cpu +.type OPENSSL_wipe_cpu,@function +.align 16 +OPENSSL_wipe_cpu: + xorl %eax, %eax + xorl %edx, %edx + leal OPENSSL_ia32cap_P,%ecx + movl (%ecx), %ecx + btl $1, (%ecx) + jnc .L006no_x87 + btl $26, (%ecx) + jnc .L007no_sse2 + pxor %xmm0, %xmm0 + pxor %xmm1, %xmm1 + pxor %xmm2, %xmm2 + pxor %xmm3, %xmm3 + pxor %xmm4, %xmm4 + pxor %xmm5, %xmm5 + pxor %xmm6, %xmm6 + pxor %xmm7, %xmm7 +.L007no_sse2: + .long 4007259865,4007259865,4007259865,4007259865,2430851995 +.L006no_x87: + leal 4(%esp), %eax + ret +.L_OPENSSL_wipe_cpu_end: +.size OPENSSL_wipe_cpu,.L_OPENSSL_wipe_cpu_end-OPENSSL_wipe_cpu +.ident "OPENSSL_wipe_cpu" +.text +.globl OPENSSL_atomic_add +.type OPENSSL_atomic_add,@function +.align 16 +OPENSSL_atomic_add: + movl 4(%esp), %edx + movl 8(%esp), %ecx + pushl %ebx + nop + movl (%edx), %eax +.L008spin: + leal (%eax,%ecx), %ebx + nop + .long 447811568 + jne .L008spin + movl %ebx, %eax + popl %ebx + ret +.L_OPENSSL_atomic_add_end: +.size OPENSSL_atomic_add,.L_OPENSSL_atomic_add_end-OPENSSL_atomic_add +.ident "OPENSSL_atomic_add" +.text +.globl OPENSSL_indirect_call +.type OPENSSL_indirect_call,@function +.align 16 +OPENSSL_indirect_call: + pushl %ebp + movl %esp, %ebp + subl $28, %esp + movl 12(%ebp), %ecx + movl %ecx, (%esp) + movl 16(%ebp), %edx + movl %edx, 4(%esp) + movl 20(%ebp), %eax + movl %eax, 8(%esp) + movl 24(%ebp), %eax + movl %eax, 12(%esp) + movl 28(%ebp), %eax + movl 
%eax, 16(%esp) + movl 32(%ebp), %eax + movl %eax, 20(%esp) + movl 36(%ebp), %eax + movl %eax, 24(%esp) + call *8(%ebp) + movl %ebp, %esp + popl %ebp + ret +.L_OPENSSL_indirect_call_end: +.size OPENSSL_indirect_call,.L_OPENSSL_indirect_call_end-OPENSSL_indirect_call +.ident "OPENSSL_indirect_call" +.section .init + call OPENSSL_cpuid_setup + jmp .Linitalign +.align 16 +.Linitalign: + +.section .bss +.comm OPENSSL_ia32cap_P,4,4 Property changes on: secure/lib/libcrypto/i386/x86cpuid.s ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/i386/sha1-586.s =================================================================== --- secure/lib/libcrypto/i386/sha1-586.s (revision 222101) +++ secure/lib/libcrypto/i386/sha1-586.s (working copy) @@ -6,1532 +6,1522 @@ .file "sha1-586.s" - .version "01.01" -gcc2_compiled.: .text - .align 16 -.globl sha1_block_asm_data_order - .type sha1_block_asm_data_order,@function -sha1_block_asm_data_order: - movl 12(%esp), %ecx - pushl %esi - sall $6, %ecx - movl 12(%esp), %esi +.globl sha1_block_data_order +.type sha1_block_data_order,@function +.align 16 +sha1_block_data_order: pushl %ebp - addl %esi, %ecx pushl %ebx - movl 16(%esp), %ebp + pushl %esi pushl %edi - movl 12(%ebp), %edx - subl $108, %esp - movl 16(%ebp), %edi - movl 8(%ebp), %ebx - movl %ecx, 68(%esp) -.L000start: - + movl 20(%esp), %ebp + movl 24(%esp), %esi + movl 28(%esp), %eax + subl $64, %esp + sall $6, %eax + addl %esi, %eax + movl %eax, 92(%esp) + movl 16(%ebp), %edi +.align 16 +.L000loop: movl (%esi), %eax - movl 4(%esi), %ecx + movl 4(%esi), %ebx + movl 8(%esi), %ecx + movl 12(%esi), %edx .byte 15 .byte 200 .byte 15 -.byte 201 - movl %eax, (%esp) - movl %ecx, 4(%esp) - movl 8(%esi), %eax - movl 12(%esi), %ecx +.byte 203 .byte 15 -.byte 200 -.byte 15 .byte 201 - movl %eax, 8(%esp) - movl %ecx, 12(%esp) +.byte 15 +.byte 202 + movl %eax, (%esp) + movl %ebx, 4(%esp) + movl %ecx, 8(%esp) + movl %edx, 12(%esp) movl 16(%esi), %eax - movl 20(%esi), %ecx + movl 20(%esi), %ebx + movl 24(%esi), %ecx + movl 28(%esi), %edx .byte 15 .byte 200 .byte 15 -.byte 201 - movl %eax, 16(%esp) - movl %ecx, 20(%esp) - movl 24(%esi), %eax - movl 28(%esi), %ecx +.byte 203 .byte 15 -.byte 200 -.byte 15 .byte 201 - movl %eax, 24(%esp) - movl %ecx, 28(%esp) +.byte 15 +.byte 202 + movl %eax, 16(%esp) + movl %ebx, 20(%esp) + movl %ecx, 24(%esp) + movl %edx, 28(%esp) movl 32(%esi), %eax - movl 36(%esi), %ecx + movl 36(%esi), %ebx + movl 40(%esi), %ecx + movl 44(%esi), %edx .byte 15 .byte 200 .byte 15 -.byte 201 - movl %eax, 32(%esp) - movl %ecx, 36(%esp) - movl 40(%esi), %eax - movl 44(%esi), %ecx +.byte 203 .byte 15 -.byte 200 -.byte 15 .byte 201 - movl %eax, 40(%esp) - movl %ecx, 44(%esp) +.byte 15 +.byte 202 + movl %eax, 32(%esp) + movl %ebx, 36(%esp) + movl %ecx, 40(%esp) + movl %edx, 44(%esp) movl 48(%esi), %eax - movl 52(%esi), %ecx + movl 52(%esi), %ebx + movl 56(%esi), %ecx + movl 60(%esi), %edx .byte 15 .byte 200 .byte 15 -.byte 201 - movl %eax, 48(%esp) - movl %ecx, 52(%esp) - movl 56(%esi), %eax - movl 60(%esi), %ecx +.byte 203 .byte 15 -.byte 200 -.byte 15 .byte 201 - movl %eax, 56(%esp) - movl %ecx, 60(%esp) - - - movl %esi, 132(%esp) -.L001shortcut: - - +.byte 15 +.byte 202 + movl %eax, 48(%esp) + movl %ebx, 52(%esp) + movl %ecx, 56(%esp) + movl %edx, 60(%esp) + movl %esi, 88(%esp) movl (%ebp), %eax - movl 4(%ebp), %ecx + movl 4(%ebp), %ebx + movl 8(%ebp), %ecx + movl 
12(%ebp), %edx + movl %ecx, %esi movl %eax, %ebp - movl %ebx, %esi roll $5, %ebp xorl %edx, %esi - andl %ecx, %esi - rorl $2, %ecx addl %edi, %ebp + andl %ebx, %esi movl (%esp), %edi xorl %edx, %esi - leal 1518500249(%ebp,%edi,1),%ebp - addl %ebp, %esi + rorl $2, %ebx + leal 1518500249(%ebp,%edi),%ebp + addl %esi, %ebp - movl %esi, %ebp - movl %ecx, %edi + movl %ebx, %edi + movl %ebp, %esi roll $5, %ebp - xorl %ebx, %edi - andl %eax, %edi - rorl $2, %eax + xorl %ecx, %edi addl %edx, %ebp + andl %eax, %edi movl 4(%esp), %edx - xorl %ebx, %edi - leal 1518500249(%ebp,%edx,1),%ebp - addl %ebp, %edi + xorl %ecx, %edi + rorl $2, %eax + leal 1518500249(%ebp,%edx),%ebp + addl %edi, %ebp - movl %edi, %ebp movl %eax, %edx + movl %ebp, %edi roll $5, %ebp - xorl %ecx, %edx + xorl %ebx, %edx + addl %ecx, %ebp andl %esi, %edx + movl 8(%esp), %ecx + xorl %ebx, %edx rorl $2, %esi - addl %ebx, %ebp - movl 8(%esp), %ebx - xorl %ecx, %edx - leal 1518500249(%ebp,%ebx,1),%ebp - addl %ebp, %edx + leal 1518500249(%ebp,%ecx),%ebp + addl %edx, %ebp - movl %edx, %ebp - movl %esi, %ebx + movl %esi, %ecx + movl %ebp, %edx roll $5, %ebp - xorl %eax, %ebx - andl %edi, %ebx + xorl %eax, %ecx + addl %ebx, %ebp + andl %edi, %ecx + movl 12(%esp), %ebx + xorl %eax, %ecx rorl $2, %edi + leal 1518500249(%ebp,%ebx),%ebp addl %ecx, %ebp - movl 12(%esp), %ecx - xorl %eax, %ebx - leal 1518500249(%ebp,%ecx,1),%ebp - addl %ebp, %ebx - movl %ebx, %ebp - movl %edi, %ecx + movl %edi, %ebx + movl %ebp, %ecx roll $5, %ebp - xorl %esi, %ecx - andl %edx, %ecx - rorl $2, %edx + xorl %esi, %ebx addl %eax, %ebp + andl %edx, %ebx movl 16(%esp), %eax - xorl %esi, %ecx - leal 1518500249(%ebp,%eax,1),%ebp - addl %ebp, %ecx + xorl %esi, %ebx + rorl $2, %edx + leal 1518500249(%ebp,%eax),%ebp + addl %ebx, %ebp - movl %ecx, %ebp movl %edx, %eax + movl %ebp, %ebx roll $5, %ebp xorl %edi, %eax - andl %ebx, %eax - rorl $2, %ebx addl %esi, %ebp + andl %ecx, %eax movl 20(%esp), %esi xorl %edi, %eax - leal 1518500249(%ebp,%esi,1),%ebp - addl %ebp, %eax + rorl $2, %ecx + leal 1518500249(%ebp,%esi),%ebp + addl %eax, %ebp - movl %eax, %ebp - movl %ebx, %esi + movl %ecx, %esi + movl %ebp, %eax roll $5, %ebp xorl %edx, %esi - andl %ecx, %esi - rorl $2, %ecx addl %edi, %ebp + andl %ebx, %esi movl 24(%esp), %edi xorl %edx, %esi - leal 1518500249(%ebp,%edi,1),%ebp - addl %ebp, %esi + rorl $2, %ebx + leal 1518500249(%ebp,%edi),%ebp + addl %esi, %ebp - movl %esi, %ebp - movl %ecx, %edi + movl %ebx, %edi + movl %ebp, %esi roll $5, %ebp - xorl %ebx, %edi - andl %eax, %edi - rorl $2, %eax + xorl %ecx, %edi addl %edx, %ebp + andl %eax, %edi movl 28(%esp), %edx - xorl %ebx, %edi - leal 1518500249(%ebp,%edx,1),%ebp - addl %ebp, %edi + xorl %ecx, %edi + rorl $2, %eax + leal 1518500249(%ebp,%edx),%ebp + addl %edi, %ebp - movl %edi, %ebp movl %eax, %edx + movl %ebp, %edi roll $5, %ebp - xorl %ecx, %edx + xorl %ebx, %edx + addl %ecx, %ebp andl %esi, %edx + movl 32(%esp), %ecx + xorl %ebx, %edx rorl $2, %esi - addl %ebx, %ebp - movl 32(%esp), %ebx - xorl %ecx, %edx - leal 1518500249(%ebp,%ebx,1),%ebp - addl %ebp, %edx + leal 1518500249(%ebp,%ecx),%ebp + addl %edx, %ebp - movl %edx, %ebp - movl %esi, %ebx + movl %esi, %ecx + movl %ebp, %edx roll $5, %ebp - xorl %eax, %ebx - andl %edi, %ebx + xorl %eax, %ecx + addl %ebx, %ebp + andl %edi, %ecx + movl 36(%esp), %ebx + xorl %eax, %ecx rorl $2, %edi + leal 1518500249(%ebp,%ebx),%ebp addl %ecx, %ebp - movl 36(%esp), %ecx - xorl %eax, %ebx - leal 1518500249(%ebp,%ecx,1),%ebp - addl %ebp, %ebx - movl %ebx, %ebp - movl %edi, %ecx + 
movl %edi, %ebx + movl %ebp, %ecx roll $5, %ebp - xorl %esi, %ecx - andl %edx, %ecx - rorl $2, %edx + xorl %esi, %ebx addl %eax, %ebp + andl %edx, %ebx movl 40(%esp), %eax - xorl %esi, %ecx - leal 1518500249(%ebp,%eax,1),%ebp - addl %ebp, %ecx + xorl %esi, %ebx + rorl $2, %edx + leal 1518500249(%ebp,%eax),%ebp + addl %ebx, %ebp - movl %ecx, %ebp movl %edx, %eax + movl %ebp, %ebx roll $5, %ebp xorl %edi, %eax - andl %ebx, %eax - rorl $2, %ebx addl %esi, %ebp + andl %ecx, %eax movl 44(%esp), %esi xorl %edi, %eax - leal 1518500249(%ebp,%esi,1),%ebp - addl %ebp, %eax + rorl $2, %ecx + leal 1518500249(%ebp,%esi),%ebp + addl %eax, %ebp - movl %eax, %ebp - movl %ebx, %esi + movl %ecx, %esi + movl %ebp, %eax roll $5, %ebp xorl %edx, %esi - andl %ecx, %esi - rorl $2, %ecx addl %edi, %ebp + andl %ebx, %esi movl 48(%esp), %edi xorl %edx, %esi - leal 1518500249(%ebp,%edi,1),%ebp - addl %ebp, %esi + rorl $2, %ebx + leal 1518500249(%ebp,%edi),%ebp + addl %esi, %ebp - movl %esi, %ebp - movl %ecx, %edi + movl %ebx, %edi + movl %ebp, %esi roll $5, %ebp - xorl %ebx, %edi - andl %eax, %edi - rorl $2, %eax + xorl %ecx, %edi addl %edx, %ebp + andl %eax, %edi movl 52(%esp), %edx - xorl %ebx, %edi - leal 1518500249(%ebp,%edx,1),%ebp - addl %ebp, %edi + xorl %ecx, %edi + rorl $2, %eax + leal 1518500249(%ebp,%edx),%ebp + addl %edi, %ebp - movl %edi, %ebp movl %eax, %edx + movl %ebp, %edi roll $5, %ebp - xorl %ecx, %edx + xorl %ebx, %edx + addl %ecx, %ebp andl %esi, %edx + movl 56(%esp), %ecx + xorl %ebx, %edx rorl $2, %esi - addl %ebx, %ebp - movl 56(%esp), %ebx - xorl %ecx, %edx - leal 1518500249(%ebp,%ebx,1),%ebp - addl %ebp, %edx + leal 1518500249(%ebp,%ecx),%ebp + addl %edx, %ebp - movl %edx, %ebp - movl %esi, %ebx + movl %esi, %ecx + movl %ebp, %edx roll $5, %ebp - xorl %eax, %ebx - andl %edi, %ebx + xorl %eax, %ecx + addl %ebx, %ebp + andl %edi, %ecx + movl 60(%esp), %ebx + xorl %eax, %ecx rorl $2, %edi - addl %ecx, %ebp - movl 60(%esp), %ecx - xorl %eax, %ebx - leal 1518500249(%ebp,%ecx,1),%ebp - addl %ebp, %ebx + leal 1518500249(%ebp,%ebx),%ebp + addl %ebp, %ecx - movl 8(%esp), %ecx + movl (%esp), %ebx movl %edi, %ebp - xorl (%esp), %ecx + xorl 8(%esp), %ebx xorl %esi, %ebp - xorl 32(%esp), %ecx + xorl 32(%esp), %ebx andl %edx, %ebp - xorl 52(%esp), %ecx rorl $2, %edx - xorl %esi, %ebp + xorl 52(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, (%esp) - leal 1518500249(%ecx,%eax,1),%ecx - movl %ebx, %eax - addl %ebp, %ecx +.byte 195 + xorl %esi, %ebp + movl %ebx, (%esp) + leal 1518500249(%ebx,%eax),%ebx + movl %ecx, %eax roll $5, %eax - addl %eax, %ecx + addl %ebp, %ebx + addl %eax, %ebx - movl 12(%esp), %eax + movl 4(%esp), %eax movl %edx, %ebp - xorl 4(%esp), %eax + xorl 12(%esp), %eax xorl %edi, %ebp xorl 36(%esp), %eax - andl %ebx, %ebp + andl %ecx, %ebp + rorl $2, %ecx xorl 56(%esp), %eax - rorl $2, %ebx - xorl %edi, %ebp .byte 209 .byte 192 + xorl %edi, %ebp movl %eax, 4(%esp) - leal 1518500249(%eax,%esi,1),%eax - movl %ecx, %esi - addl %ebp, %eax + leal 1518500249(%eax,%esi),%eax + movl %ebx, %esi roll $5, %esi + addl %ebp, %eax addl %esi, %eax - movl 16(%esp), %esi - movl %ebx, %ebp - xorl 8(%esp), %esi + movl 8(%esp), %esi + movl %ecx, %ebp + xorl 16(%esp), %esi xorl %edx, %ebp xorl 40(%esp), %esi - andl %ecx, %ebp + andl %ebx, %ebp + rorl $2, %ebx xorl 60(%esp), %esi - rorl $2, %ecx - xorl %edx, %ebp .byte 209 .byte 198 + xorl %edx, %ebp movl %esi, 8(%esp) - leal 1518500249(%esi,%edi,1),%esi + leal 1518500249(%esi,%edi),%esi movl %eax, %edi - addl %ebp, %esi roll $5, %edi + addl %ebp, %esi addl %edi, 
%esi - movl 20(%esp), %edi - movl %ecx, %ebp - xorl 12(%esp), %edi - xorl %ebx, %ebp + movl 12(%esp), %edi + movl %ebx, %ebp + xorl 20(%esp), %edi + xorl %ecx, %ebp xorl 44(%esp), %edi andl %eax, %ebp - xorl (%esp), %edi rorl $2, %eax - xorl %ebx, %ebp + xorl (%esp), %edi .byte 209 .byte 199 + xorl %ecx, %ebp movl %edi, 12(%esp) - leal 1518500249(%edi,%edx,1),%edi + leal 1518500249(%edi,%edx),%edi movl %esi, %edx - addl %ebp, %edi roll $5, %edx + addl %ebp, %edi addl %edx, %edi - movl 16(%esp), %edx movl %esi, %ebp - xorl 24(%esp), %edx + movl 16(%esp), %edx rorl $2, %esi - xorl 48(%esp), %edx + xorl 24(%esp), %edx xorl %eax, %ebp + xorl 48(%esp), %edx + xorl %ebx, %ebp xorl 4(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, 16(%esp) - leal 1859775393(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 1859775393(%edx,%ebp),%edx + addl %ecx, %edx - movl 20(%esp), %ebx movl %edi, %ebp - xorl 28(%esp), %ebx + movl 20(%esp), %ecx rorl $2, %edi - xorl 52(%esp), %ebx + xorl 28(%esp), %ecx xorl %esi, %ebp - xorl 8(%esp), %ebx + xorl 52(%esp), %ecx xorl %eax, %ebp + xorl 8(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 20(%esp) - leal 1859775393(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 20(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 1859775393(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl 24(%esp), %ecx movl %edx, %ebp - xorl 32(%esp), %ecx + movl 24(%esp), %ebx rorl $2, %edx - xorl 56(%esp), %ecx + xorl 32(%esp), %ebx xorl %edi, %ebp - xorl 12(%esp), %ecx + xorl 56(%esp), %ebx xorl %esi, %ebp + xorl 12(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, 24(%esp) - leal 1859775393(%ecx,%eax,1),%ecx - movl %ebx, %eax +.byte 195 + addl %eax, %ebp + movl %ebx, 24(%esp) + movl %ecx, %eax roll $5, %eax - addl %ebp, %ecx - addl %eax, %ecx + leal 1859775393(%ebx,%ebp),%ebx + addl %eax, %ebx + movl %ecx, %ebp movl 28(%esp), %eax - movl %ebx, %ebp + rorl $2, %ecx xorl 36(%esp), %eax - rorl $2, %ebx - xorl 60(%esp), %eax xorl %edx, %ebp - xorl 16(%esp), %eax + xorl 60(%esp), %eax xorl %edi, %ebp + xorl 16(%esp), %eax .byte 209 .byte 192 + addl %esi, %ebp movl %eax, 28(%esp) - leal 1859775393(%eax,%esi,1),%eax - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %ebp, %eax + leal 1859775393(%eax,%ebp),%eax addl %esi, %eax + movl %ebx, %ebp movl 32(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl 40(%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl (%esp), %esi - xorl %ebx, %ebp - xorl 20(%esp), %esi xorl %edx, %ebp + xorl 20(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 32(%esp) - leal 1859775393(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 1859775393(%esi,%ebp),%esi addl %edi, %esi - movl 36(%esp), %edi movl %eax, %ebp - xorl 44(%esp), %edi + movl 36(%esp), %edi rorl $2, %eax + xorl 44(%esp), %edi + xorl %ebx, %ebp xorl 4(%esp), %edi xorl %ecx, %ebp xorl 24(%esp), %edi - xorl %ebx, %ebp .byte 209 .byte 199 + addl %edx, %ebp movl %edi, 36(%esp) - leal 1859775393(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 1859775393(%edi,%ebp),%edi addl %edx, %edi - movl 40(%esp), %edx movl %esi, %ebp - xorl 48(%esp), %edx + movl 40(%esp), %edx rorl $2, %esi - xorl 8(%esp), %edx + xorl 48(%esp), %edx xorl %eax, %ebp + xorl 8(%esp), %edx + xorl %ebx, %ebp xorl 28(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, 40(%esp) - leal 
1859775393(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 1859775393(%edx,%ebp),%edx + addl %ecx, %edx - movl 44(%esp), %ebx movl %edi, %ebp - xorl 52(%esp), %ebx + movl 44(%esp), %ecx rorl $2, %edi - xorl 12(%esp), %ebx + xorl 52(%esp), %ecx xorl %esi, %ebp - xorl 32(%esp), %ebx + xorl 12(%esp), %ecx xorl %eax, %ebp + xorl 32(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 44(%esp) - leal 1859775393(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 44(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 1859775393(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl 48(%esp), %ecx movl %edx, %ebp - xorl 56(%esp), %ecx + movl 48(%esp), %ebx rorl $2, %edx - xorl 16(%esp), %ecx + xorl 56(%esp), %ebx xorl %edi, %ebp - xorl 36(%esp), %ecx + xorl 16(%esp), %ebx xorl %esi, %ebp + xorl 36(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, 48(%esp) - leal 1859775393(%ecx,%eax,1),%ecx - movl %ebx, %eax +.byte 195 + addl %eax, %ebp + movl %ebx, 48(%esp) + movl %ecx, %eax roll $5, %eax - addl %ebp, %ecx - addl %eax, %ecx + leal 1859775393(%ebx,%ebp),%ebx + addl %eax, %ebx + movl %ecx, %ebp movl 52(%esp), %eax - movl %ebx, %ebp + rorl $2, %ecx xorl 60(%esp), %eax - rorl $2, %ebx - xorl 20(%esp), %eax xorl %edx, %ebp - xorl 40(%esp), %eax + xorl 20(%esp), %eax xorl %edi, %ebp + xorl 40(%esp), %eax .byte 209 .byte 192 + addl %esi, %ebp movl %eax, 52(%esp) - leal 1859775393(%eax,%esi,1),%eax - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %ebp, %eax + leal 1859775393(%eax,%ebp),%eax addl %esi, %eax + movl %ebx, %ebp movl 56(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl (%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl 24(%esp), %esi - xorl %ebx, %ebp - xorl 44(%esp), %esi xorl %edx, %ebp + xorl 44(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 56(%esp) - leal 1859775393(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 1859775393(%esi,%ebp),%esi addl %edi, %esi - movl 60(%esp), %edi movl %eax, %ebp - xorl 4(%esp), %edi + movl 60(%esp), %edi rorl $2, %eax + xorl 4(%esp), %edi + xorl %ebx, %ebp xorl 28(%esp), %edi xorl %ecx, %ebp xorl 48(%esp), %edi - xorl %ebx, %ebp .byte 209 .byte 199 + addl %edx, %ebp movl %edi, 60(%esp) - leal 1859775393(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 1859775393(%edi,%ebp),%edi addl %edx, %edi - movl (%esp), %edx movl %esi, %ebp - xorl 8(%esp), %edx + movl (%esp), %edx rorl $2, %esi - xorl 32(%esp), %edx + xorl 8(%esp), %edx xorl %eax, %ebp + xorl 32(%esp), %edx + xorl %ebx, %ebp xorl 52(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, (%esp) - leal 1859775393(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 1859775393(%edx,%ebp),%edx + addl %ecx, %edx - movl 4(%esp), %ebx movl %edi, %ebp - xorl 12(%esp), %ebx + movl 4(%esp), %ecx rorl $2, %edi - xorl 36(%esp), %ebx + xorl 12(%esp), %ecx xorl %esi, %ebp - xorl 56(%esp), %ebx + xorl 36(%esp), %ecx xorl %eax, %ebp + xorl 56(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 4(%esp) - leal 1859775393(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 4(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 1859775393(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl 8(%esp), %ecx movl %edx, %ebp - xorl 16(%esp), %ecx + movl 8(%esp), %ebx rorl $2, 
%edx - xorl 40(%esp), %ecx + xorl 16(%esp), %ebx xorl %edi, %ebp - xorl 60(%esp), %ecx + xorl 40(%esp), %ebx xorl %esi, %ebp + xorl 60(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, 8(%esp) - leal 1859775393(%ecx,%eax,1),%ecx - movl %ebx, %eax +.byte 195 + addl %eax, %ebp + movl %ebx, 8(%esp) + movl %ecx, %eax roll $5, %eax - addl %ebp, %ecx - addl %eax, %ecx + leal 1859775393(%ebx,%ebp),%ebx + addl %eax, %ebx + movl %ecx, %ebp movl 12(%esp), %eax - movl %ebx, %ebp + rorl $2, %ecx xorl 20(%esp), %eax - rorl $2, %ebx - xorl 44(%esp), %eax xorl %edx, %ebp - xorl (%esp), %eax + xorl 44(%esp), %eax xorl %edi, %ebp + xorl (%esp), %eax .byte 209 .byte 192 + addl %esi, %ebp movl %eax, 12(%esp) - leal 1859775393(%eax,%esi,1),%eax - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %ebp, %eax + leal 1859775393(%eax,%ebp),%eax addl %esi, %eax + movl %ebx, %ebp movl 16(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl 24(%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl 48(%esp), %esi - xorl %ebx, %ebp - xorl 4(%esp), %esi xorl %edx, %ebp + xorl 4(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 16(%esp) - leal 1859775393(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 1859775393(%esi,%ebp),%esi addl %edi, %esi - movl 20(%esp), %edi movl %eax, %ebp - xorl 28(%esp), %edi + movl 20(%esp), %edi rorl $2, %eax + xorl 28(%esp), %edi + xorl %ebx, %ebp xorl 52(%esp), %edi xorl %ecx, %ebp xorl 8(%esp), %edi - xorl %ebx, %ebp .byte 209 .byte 199 + addl %edx, %ebp movl %edi, 20(%esp) - leal 1859775393(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 1859775393(%edi,%ebp),%edi addl %edx, %edi - movl 24(%esp), %edx movl %esi, %ebp - xorl 32(%esp), %edx + movl 24(%esp), %edx rorl $2, %esi - xorl 56(%esp), %edx + xorl 32(%esp), %edx xorl %eax, %ebp + xorl 56(%esp), %edx + xorl %ebx, %ebp xorl 12(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, 24(%esp) - leal 1859775393(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 1859775393(%edx,%ebp),%edx + addl %ecx, %edx - movl 28(%esp), %ebx movl %edi, %ebp - xorl 36(%esp), %ebx + movl 28(%esp), %ecx rorl $2, %edi - xorl 60(%esp), %ebx + xorl 36(%esp), %ecx xorl %esi, %ebp - xorl 16(%esp), %ebx + xorl 60(%esp), %ecx xorl %eax, %ebp + xorl 16(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 28(%esp) - leal 1859775393(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 28(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 1859775393(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl 32(%esp), %ecx + movl 32(%esp), %ebx + movl 40(%esp), %ebp + xorl %ebp, %ebx + movl (%esp), %ebp + xorl %ebp, %ebx + movl 20(%esp), %ebp + xorl %ebp, %ebx movl %edx, %ebp - xorl 40(%esp), %ecx +.byte 209 +.byte 195 orl %edi, %ebp - xorl (%esp), %ecx + movl %ebx, 32(%esp) andl %esi, %ebp - xorl 20(%esp), %ecx -.byte 209 -.byte 193 - movl %ecx, 32(%esp) - leal 2400959708(%ecx,%eax,1),%ecx + leal 2400959708(%ebx,%eax),%ebx movl %edx, %eax rorl $2, %edx andl %edi, %eax orl %eax, %ebp - movl %ebx, %eax + movl %ecx, %eax roll $5, %eax - addl %eax, %ebp - addl %ebp, %ecx + addl %ebp, %ebx + addl %eax, %ebx movl 36(%esp), %eax - movl %ebx, %ebp - xorl 44(%esp), %eax - orl %edx, %ebp - xorl 4(%esp), %eax - andl %edi, %ebp - xorl 24(%esp), %eax + movl 44(%esp), %ebp + xorl %ebp, %eax + movl 4(%esp), %ebp + xorl %ebp, %eax + movl 24(%esp), %ebp + xorl %ebp, %eax + movl 
%ecx, %ebp .byte 209 .byte 192 + orl %edx, %ebp movl %eax, 36(%esp) - leal 2400959708(%eax,%esi,1),%eax - movl %ebx, %esi - rorl $2, %ebx + andl %edi, %ebp + leal 2400959708(%eax,%esi),%eax + movl %ecx, %esi + rorl $2, %ecx andl %edx, %esi orl %esi, %ebp - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %esi, %ebp addl %ebp, %eax + addl %esi, %eax movl 40(%esp), %esi - movl %ecx, %ebp - xorl 48(%esp), %esi - orl %ebx, %ebp - xorl 8(%esp), %esi - andl %edx, %ebp - xorl 28(%esp), %esi + movl 48(%esp), %ebp + xorl %ebp, %esi + movl 8(%esp), %ebp + xorl %ebp, %esi + movl 28(%esp), %ebp + xorl %ebp, %esi + movl %ebx, %ebp .byte 209 .byte 198 + orl %ecx, %ebp movl %esi, 40(%esp) - leal 2400959708(%esi,%edi,1),%esi - movl %ecx, %edi - rorl $2, %ecx - andl %ebx, %edi + andl %edx, %ebp + leal 2400959708(%esi,%edi),%esi + movl %ebx, %edi + rorl $2, %ebx + andl %ecx, %edi orl %edi, %ebp movl %eax, %edi roll $5, %edi - addl %edi, %ebp addl %ebp, %esi + addl %edi, %esi movl 44(%esp), %edi + movl 52(%esp), %ebp + xorl %ebp, %edi + movl 12(%esp), %ebp + xorl %ebp, %edi + movl 32(%esp), %ebp + xorl %ebp, %edi movl %eax, %ebp - xorl 52(%esp), %edi - orl %ecx, %ebp - xorl 12(%esp), %edi - andl %ebx, %ebp - xorl 32(%esp), %edi .byte 209 .byte 199 + orl %ebx, %ebp movl %edi, 44(%esp) - leal 2400959708(%edi,%edx,1),%edi + andl %ecx, %ebp + leal 2400959708(%edi,%edx),%edi movl %eax, %edx rorl $2, %eax - andl %ecx, %edx + andl %ebx, %edx orl %edx, %ebp movl %esi, %edx roll $5, %edx - addl %edx, %ebp addl %ebp, %edi + addl %edx, %edi movl 48(%esp), %edx + movl 56(%esp), %ebp + xorl %ebp, %edx + movl 16(%esp), %ebp + xorl %ebp, %edx + movl 36(%esp), %ebp + xorl %ebp, %edx movl %esi, %ebp - xorl 56(%esp), %edx - orl %eax, %ebp - xorl 16(%esp), %edx - andl %ecx, %ebp - xorl 36(%esp), %edx .byte 209 .byte 194 + orl %eax, %ebp movl %edx, 48(%esp) - leal 2400959708(%edx,%ebx,1),%edx - movl %esi, %ebx + andl %ebx, %ebp + leal 2400959708(%edx,%ecx),%edx + movl %esi, %ecx rorl $2, %esi - andl %eax, %ebx - orl %ebx, %ebp - movl %edi, %ebx - roll $5, %ebx - addl %ebx, %ebp + andl %eax, %ecx + orl %ecx, %ebp + movl %edi, %ecx + roll $5, %ecx addl %ebp, %edx + addl %ecx, %edx - movl 52(%esp), %ebx + movl 52(%esp), %ecx + movl 60(%esp), %ebp + xorl %ebp, %ecx + movl 20(%esp), %ebp + xorl %ebp, %ecx + movl 40(%esp), %ebp + xorl %ebp, %ecx movl %edi, %ebp - xorl 60(%esp), %ebx +.byte 209 +.byte 193 orl %esi, %ebp - xorl 20(%esp), %ebx + movl %ecx, 52(%esp) andl %eax, %ebp - xorl 40(%esp), %ebx -.byte 209 -.byte 195 - movl %ebx, 52(%esp) - leal 2400959708(%ebx,%ecx,1),%ebx - movl %edi, %ecx + leal 2400959708(%ecx,%ebx),%ecx + movl %edi, %ebx rorl $2, %edi - andl %esi, %ecx - orl %ecx, %ebp - movl %edx, %ecx - roll $5, %ecx - addl %ecx, %ebp - addl %ebp, %ebx + andl %esi, %ebx + orl %ebx, %ebp + movl %edx, %ebx + roll $5, %ebx + addl %ebp, %ecx + addl %ebx, %ecx - movl 56(%esp), %ecx + movl 56(%esp), %ebx + movl (%esp), %ebp + xorl %ebp, %ebx + movl 24(%esp), %ebp + xorl %ebp, %ebx + movl 44(%esp), %ebp + xorl %ebp, %ebx movl %edx, %ebp - xorl (%esp), %ecx +.byte 209 +.byte 195 orl %edi, %ebp - xorl 24(%esp), %ecx + movl %ebx, 56(%esp) andl %esi, %ebp - xorl 44(%esp), %ecx -.byte 209 -.byte 193 - movl %ecx, 56(%esp) - leal 2400959708(%ecx,%eax,1),%ecx + leal 2400959708(%ebx,%eax),%ebx movl %edx, %eax rorl $2, %edx andl %edi, %eax orl %eax, %ebp - movl %ebx, %eax + movl %ecx, %eax roll $5, %eax - addl %eax, %ebp - addl %ebp, %ecx + addl %ebp, %ebx + addl %eax, %ebx movl 60(%esp), %eax - movl %ebx, %ebp - xorl 4(%esp), %eax - 
orl %edx, %ebp - xorl 28(%esp), %eax - andl %edi, %ebp - xorl 48(%esp), %eax + movl 4(%esp), %ebp + xorl %ebp, %eax + movl 28(%esp), %ebp + xorl %ebp, %eax + movl 48(%esp), %ebp + xorl %ebp, %eax + movl %ecx, %ebp .byte 209 .byte 192 + orl %edx, %ebp movl %eax, 60(%esp) - leal 2400959708(%eax,%esi,1),%eax - movl %ebx, %esi - rorl $2, %ebx + andl %edi, %ebp + leal 2400959708(%eax,%esi),%eax + movl %ecx, %esi + rorl $2, %ecx andl %edx, %esi orl %esi, %ebp - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %esi, %ebp addl %ebp, %eax + addl %esi, %eax movl (%esp), %esi - movl %ecx, %ebp - xorl 8(%esp), %esi - orl %ebx, %ebp - xorl 32(%esp), %esi - andl %edx, %ebp - xorl 52(%esp), %esi + movl 8(%esp), %ebp + xorl %ebp, %esi + movl 32(%esp), %ebp + xorl %ebp, %esi + movl 52(%esp), %ebp + xorl %ebp, %esi + movl %ebx, %ebp .byte 209 .byte 198 + orl %ecx, %ebp movl %esi, (%esp) - leal 2400959708(%esi,%edi,1),%esi - movl %ecx, %edi - rorl $2, %ecx - andl %ebx, %edi + andl %edx, %ebp + leal 2400959708(%esi,%edi),%esi + movl %ebx, %edi + rorl $2, %ebx + andl %ecx, %edi orl %edi, %ebp movl %eax, %edi roll $5, %edi - addl %edi, %ebp addl %ebp, %esi + addl %edi, %esi movl 4(%esp), %edi + movl 12(%esp), %ebp + xorl %ebp, %edi + movl 36(%esp), %ebp + xorl %ebp, %edi + movl 56(%esp), %ebp + xorl %ebp, %edi movl %eax, %ebp - xorl 12(%esp), %edi - orl %ecx, %ebp - xorl 36(%esp), %edi - andl %ebx, %ebp - xorl 56(%esp), %edi .byte 209 .byte 199 + orl %ebx, %ebp movl %edi, 4(%esp) - leal 2400959708(%edi,%edx,1),%edi + andl %ecx, %ebp + leal 2400959708(%edi,%edx),%edi movl %eax, %edx rorl $2, %eax - andl %ecx, %edx + andl %ebx, %edx orl %edx, %ebp movl %esi, %edx roll $5, %edx - addl %edx, %ebp addl %ebp, %edi + addl %edx, %edi movl 8(%esp), %edx + movl 16(%esp), %ebp + xorl %ebp, %edx + movl 40(%esp), %ebp + xorl %ebp, %edx + movl 60(%esp), %ebp + xorl %ebp, %edx movl %esi, %ebp - xorl 16(%esp), %edx - orl %eax, %ebp - xorl 40(%esp), %edx - andl %ecx, %ebp - xorl 60(%esp), %edx .byte 209 .byte 194 + orl %eax, %ebp movl %edx, 8(%esp) - leal 2400959708(%edx,%ebx,1),%edx - movl %esi, %ebx + andl %ebx, %ebp + leal 2400959708(%edx,%ecx),%edx + movl %esi, %ecx rorl $2, %esi - andl %eax, %ebx - orl %ebx, %ebp - movl %edi, %ebx - roll $5, %ebx - addl %ebx, %ebp + andl %eax, %ecx + orl %ecx, %ebp + movl %edi, %ecx + roll $5, %ecx addl %ebp, %edx + addl %ecx, %edx - movl 12(%esp), %ebx + movl 12(%esp), %ecx + movl 20(%esp), %ebp + xorl %ebp, %ecx + movl 44(%esp), %ebp + xorl %ebp, %ecx + movl (%esp), %ebp + xorl %ebp, %ecx movl %edi, %ebp - xorl 20(%esp), %ebx +.byte 209 +.byte 193 orl %esi, %ebp - xorl 44(%esp), %ebx + movl %ecx, 12(%esp) andl %eax, %ebp - xorl (%esp), %ebx -.byte 209 -.byte 195 - movl %ebx, 12(%esp) - leal 2400959708(%ebx,%ecx,1),%ebx - movl %edi, %ecx + leal 2400959708(%ecx,%ebx),%ecx + movl %edi, %ebx rorl $2, %edi - andl %esi, %ecx - orl %ecx, %ebp - movl %edx, %ecx - roll $5, %ecx - addl %ecx, %ebp - addl %ebp, %ebx + andl %esi, %ebx + orl %ebx, %ebp + movl %edx, %ebx + roll $5, %ebx + addl %ebp, %ecx + addl %ebx, %ecx - movl 16(%esp), %ecx + movl 16(%esp), %ebx + movl 24(%esp), %ebp + xorl %ebp, %ebx + movl 48(%esp), %ebp + xorl %ebp, %ebx + movl 4(%esp), %ebp + xorl %ebp, %ebx movl %edx, %ebp - xorl 24(%esp), %ecx +.byte 209 +.byte 195 orl %edi, %ebp - xorl 48(%esp), %ecx + movl %ebx, 16(%esp) andl %esi, %ebp - xorl 4(%esp), %ecx -.byte 209 -.byte 193 - movl %ecx, 16(%esp) - leal 2400959708(%ecx,%eax,1),%ecx + leal 2400959708(%ebx,%eax),%ebx movl %edx, %eax rorl $2, %edx andl %edi, %eax orl 
%eax, %ebp - movl %ebx, %eax + movl %ecx, %eax roll $5, %eax - addl %eax, %ebp - addl %ebp, %ecx + addl %ebp, %ebx + addl %eax, %ebx movl 20(%esp), %eax - movl %ebx, %ebp - xorl 28(%esp), %eax - orl %edx, %ebp - xorl 52(%esp), %eax - andl %edi, %ebp - xorl 8(%esp), %eax + movl 28(%esp), %ebp + xorl %ebp, %eax + movl 52(%esp), %ebp + xorl %ebp, %eax + movl 8(%esp), %ebp + xorl %ebp, %eax + movl %ecx, %ebp .byte 209 .byte 192 + orl %edx, %ebp movl %eax, 20(%esp) - leal 2400959708(%eax,%esi,1),%eax - movl %ebx, %esi - rorl $2, %ebx + andl %edi, %ebp + leal 2400959708(%eax,%esi),%eax + movl %ecx, %esi + rorl $2, %ecx andl %edx, %esi orl %esi, %ebp - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %esi, %ebp addl %ebp, %eax + addl %esi, %eax movl 24(%esp), %esi - movl %ecx, %ebp - xorl 32(%esp), %esi - orl %ebx, %ebp - xorl 56(%esp), %esi - andl %edx, %ebp - xorl 12(%esp), %esi + movl 32(%esp), %ebp + xorl %ebp, %esi + movl 56(%esp), %ebp + xorl %ebp, %esi + movl 12(%esp), %ebp + xorl %ebp, %esi + movl %ebx, %ebp .byte 209 .byte 198 + orl %ecx, %ebp movl %esi, 24(%esp) - leal 2400959708(%esi,%edi,1),%esi - movl %ecx, %edi - rorl $2, %ecx - andl %ebx, %edi + andl %edx, %ebp + leal 2400959708(%esi,%edi),%esi + movl %ebx, %edi + rorl $2, %ebx + andl %ecx, %edi orl %edi, %ebp movl %eax, %edi roll $5, %edi - addl %edi, %ebp addl %ebp, %esi + addl %edi, %esi movl 28(%esp), %edi + movl 36(%esp), %ebp + xorl %ebp, %edi + movl 60(%esp), %ebp + xorl %ebp, %edi + movl 16(%esp), %ebp + xorl %ebp, %edi movl %eax, %ebp - xorl 36(%esp), %edi - orl %ecx, %ebp - xorl 60(%esp), %edi - andl %ebx, %ebp - xorl 16(%esp), %edi .byte 209 .byte 199 + orl %ebx, %ebp movl %edi, 28(%esp) - leal 2400959708(%edi,%edx,1),%edi + andl %ecx, %ebp + leal 2400959708(%edi,%edx),%edi movl %eax, %edx rorl $2, %eax - andl %ecx, %edx + andl %ebx, %edx orl %edx, %ebp movl %esi, %edx roll $5, %edx - addl %edx, %ebp addl %ebp, %edi + addl %edx, %edi movl 32(%esp), %edx + movl 40(%esp), %ebp + xorl %ebp, %edx + movl (%esp), %ebp + xorl %ebp, %edx + movl 20(%esp), %ebp + xorl %ebp, %edx movl %esi, %ebp - xorl 40(%esp), %edx - orl %eax, %ebp - xorl (%esp), %edx - andl %ecx, %ebp - xorl 20(%esp), %edx .byte 209 .byte 194 + orl %eax, %ebp movl %edx, 32(%esp) - leal 2400959708(%edx,%ebx,1),%edx - movl %esi, %ebx + andl %ebx, %ebp + leal 2400959708(%edx,%ecx),%edx + movl %esi, %ecx rorl $2, %esi - andl %eax, %ebx - orl %ebx, %ebp - movl %edi, %ebx - roll $5, %ebx - addl %ebx, %ebp + andl %eax, %ecx + orl %ecx, %ebp + movl %edi, %ecx + roll $5, %ecx addl %ebp, %edx + addl %ecx, %edx - movl 36(%esp), %ebx + movl 36(%esp), %ecx + movl 44(%esp), %ebp + xorl %ebp, %ecx + movl 4(%esp), %ebp + xorl %ebp, %ecx + movl 24(%esp), %ebp + xorl %ebp, %ecx movl %edi, %ebp - xorl 44(%esp), %ebx +.byte 209 +.byte 193 orl %esi, %ebp - xorl 4(%esp), %ebx + movl %ecx, 36(%esp) andl %eax, %ebp - xorl 24(%esp), %ebx -.byte 209 -.byte 195 - movl %ebx, 36(%esp) - leal 2400959708(%ebx,%ecx,1),%ebx - movl %edi, %ecx + leal 2400959708(%ecx,%ebx),%ecx + movl %edi, %ebx rorl $2, %edi - andl %esi, %ecx - orl %ecx, %ebp - movl %edx, %ecx - roll $5, %ecx - addl %ecx, %ebp - addl %ebp, %ebx + andl %esi, %ebx + orl %ebx, %ebp + movl %edx, %ebx + roll $5, %ebx + addl %ebp, %ecx + addl %ebx, %ecx - movl 40(%esp), %ecx + movl 40(%esp), %ebx + movl 48(%esp), %ebp + xorl %ebp, %ebx + movl 8(%esp), %ebp + xorl %ebp, %ebx + movl 28(%esp), %ebp + xorl %ebp, %ebx movl %edx, %ebp - xorl 48(%esp), %ecx +.byte 209 +.byte 195 orl %edi, %ebp - xorl 8(%esp), %ecx + movl %ebx, 40(%esp) 
andl %esi, %ebp - xorl 28(%esp), %ecx -.byte 209 -.byte 193 - movl %ecx, 40(%esp) - leal 2400959708(%ecx,%eax,1),%ecx + leal 2400959708(%ebx,%eax),%ebx movl %edx, %eax rorl $2, %edx andl %edi, %eax orl %eax, %ebp - movl %ebx, %eax + movl %ecx, %eax roll $5, %eax - addl %eax, %ebp - addl %ebp, %ecx + addl %ebp, %ebx + addl %eax, %ebx movl 44(%esp), %eax - movl %ebx, %ebp - xorl 52(%esp), %eax - orl %edx, %ebp - xorl 12(%esp), %eax - andl %edi, %ebp - xorl 32(%esp), %eax + movl 52(%esp), %ebp + xorl %ebp, %eax + movl 12(%esp), %ebp + xorl %ebp, %eax + movl 32(%esp), %ebp + xorl %ebp, %eax + movl %ecx, %ebp .byte 209 .byte 192 + orl %edx, %ebp movl %eax, 44(%esp) - leal 2400959708(%eax,%esi,1),%eax - movl %ebx, %esi - rorl $2, %ebx + andl %edi, %ebp + leal 2400959708(%eax,%esi),%eax + movl %ecx, %esi + rorl $2, %ecx andl %edx, %esi orl %esi, %ebp - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %esi, %ebp addl %ebp, %eax + addl %esi, %eax + movl %ebx, %ebp movl 48(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl 56(%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl 16(%esp), %esi - xorl %ebx, %ebp - xorl 36(%esp), %esi xorl %edx, %ebp + xorl 36(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 48(%esp) - leal 3395469782(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 3395469782(%esi,%ebp),%esi addl %edi, %esi - movl 52(%esp), %edi movl %eax, %ebp - xorl 60(%esp), %edi + movl 52(%esp), %edi rorl $2, %eax + xorl 60(%esp), %edi + xorl %ebx, %ebp xorl 20(%esp), %edi xorl %ecx, %ebp xorl 40(%esp), %edi - xorl %ebx, %ebp .byte 209 .byte 199 + addl %edx, %ebp movl %edi, 52(%esp) - leal 3395469782(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 3395469782(%edi,%ebp),%edi addl %edx, %edi - movl 56(%esp), %edx movl %esi, %ebp - xorl (%esp), %edx + movl 56(%esp), %edx rorl $2, %esi - xorl 24(%esp), %edx + xorl (%esp), %edx xorl %eax, %ebp + xorl 24(%esp), %edx + xorl %ebx, %ebp xorl 44(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, 56(%esp) - leal 3395469782(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 3395469782(%edx,%ebp),%edx + addl %ecx, %edx - movl 60(%esp), %ebx movl %edi, %ebp - xorl 4(%esp), %ebx + movl 60(%esp), %ecx rorl $2, %edi - xorl 28(%esp), %ebx + xorl 4(%esp), %ecx xorl %esi, %ebp - xorl 48(%esp), %ebx + xorl 28(%esp), %ecx xorl %eax, %ebp + xorl 48(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 60(%esp) - leal 3395469782(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 60(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 3395469782(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl (%esp), %ecx movl %edx, %ebp - xorl 8(%esp), %ecx + movl (%esp), %ebx rorl $2, %edx - xorl 32(%esp), %ecx + xorl 8(%esp), %ebx xorl %edi, %ebp - xorl 52(%esp), %ecx + xorl 32(%esp), %ebx xorl %esi, %ebp + xorl 52(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, (%esp) - leal 3395469782(%ecx,%eax,1),%ecx - movl %ebx, %eax +.byte 195 + addl %eax, %ebp + movl %ebx, (%esp) + movl %ecx, %eax roll $5, %eax - addl %ebp, %ecx - addl %eax, %ecx + leal 3395469782(%ebx,%ebp),%ebx + addl %eax, %ebx + movl %ecx, %ebp movl 4(%esp), %eax - movl %ebx, %ebp + rorl $2, %ecx xorl 12(%esp), %eax - rorl $2, %ebx - xorl 36(%esp), %eax xorl %edx, %ebp - xorl 56(%esp), %eax + xorl 36(%esp), %eax xorl %edi, %ebp + xorl 56(%esp), %eax .byte 209 .byte 192 + addl %esi, %ebp movl %eax, 
4(%esp) - leal 3395469782(%eax,%esi,1),%eax - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %ebp, %eax + leal 3395469782(%eax,%ebp),%eax addl %esi, %eax + movl %ebx, %ebp movl 8(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl 16(%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl 40(%esp), %esi - xorl %ebx, %ebp - xorl 60(%esp), %esi xorl %edx, %ebp + xorl 60(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 8(%esp) - leal 3395469782(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 3395469782(%esi,%ebp),%esi addl %edi, %esi - movl 12(%esp), %edi movl %eax, %ebp - xorl 20(%esp), %edi + movl 12(%esp), %edi rorl $2, %eax + xorl 20(%esp), %edi + xorl %ebx, %ebp xorl 44(%esp), %edi xorl %ecx, %ebp xorl (%esp), %edi - xorl %ebx, %ebp .byte 209 .byte 199 + addl %edx, %ebp movl %edi, 12(%esp) - leal 3395469782(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 3395469782(%edi,%ebp),%edi addl %edx, %edi - movl 16(%esp), %edx movl %esi, %ebp - xorl 24(%esp), %edx + movl 16(%esp), %edx rorl $2, %esi - xorl 48(%esp), %edx + xorl 24(%esp), %edx xorl %eax, %ebp + xorl 48(%esp), %edx + xorl %ebx, %ebp xorl 4(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, 16(%esp) - leal 3395469782(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 3395469782(%edx,%ebp),%edx + addl %ecx, %edx - movl 20(%esp), %ebx movl %edi, %ebp - xorl 28(%esp), %ebx + movl 20(%esp), %ecx rorl $2, %edi - xorl 52(%esp), %ebx + xorl 28(%esp), %ecx xorl %esi, %ebp - xorl 8(%esp), %ebx + xorl 52(%esp), %ecx xorl %eax, %ebp + xorl 8(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 20(%esp) - leal 3395469782(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 20(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 3395469782(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl 24(%esp), %ecx movl %edx, %ebp - xorl 32(%esp), %ecx + movl 24(%esp), %ebx rorl $2, %edx - xorl 56(%esp), %ecx + xorl 32(%esp), %ebx xorl %edi, %ebp - xorl 12(%esp), %ecx + xorl 56(%esp), %ebx xorl %esi, %ebp + xorl 12(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, 24(%esp) - leal 3395469782(%ecx,%eax,1),%ecx - movl %ebx, %eax +.byte 195 + addl %eax, %ebp + movl %ebx, 24(%esp) + movl %ecx, %eax roll $5, %eax - addl %ebp, %ecx - addl %eax, %ecx + leal 3395469782(%ebx,%ebp),%ebx + addl %eax, %ebx + movl %ecx, %ebp movl 28(%esp), %eax - movl %ebx, %ebp + rorl $2, %ecx xorl 36(%esp), %eax - rorl $2, %ebx - xorl 60(%esp), %eax xorl %edx, %ebp - xorl 16(%esp), %eax + xorl 60(%esp), %eax xorl %edi, %ebp + xorl 16(%esp), %eax .byte 209 .byte 192 + addl %esi, %ebp movl %eax, 28(%esp) - leal 3395469782(%eax,%esi,1),%eax - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %ebp, %eax + leal 3395469782(%eax,%ebp),%eax addl %esi, %eax + movl %ebx, %ebp movl 32(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl 40(%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl (%esp), %esi - xorl %ebx, %ebp - xorl 20(%esp), %esi xorl %edx, %ebp + xorl 20(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 32(%esp) - leal 3395469782(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 3395469782(%esi,%ebp),%esi addl %edi, %esi - movl 36(%esp), %edi movl %eax, %ebp - xorl 44(%esp), %edi + movl 36(%esp), %edi rorl $2, %eax + xorl 44(%esp), %edi + xorl %ebx, %ebp xorl 4(%esp), %edi xorl %ecx, %ebp xorl 24(%esp), %edi - xorl %ebx, %ebp 
.byte 209 .byte 199 + addl %edx, %ebp movl %edi, 36(%esp) - leal 3395469782(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 3395469782(%edi,%ebp),%edi addl %edx, %edi - movl 40(%esp), %edx movl %esi, %ebp - xorl 48(%esp), %edx + movl 40(%esp), %edx rorl $2, %esi - xorl 8(%esp), %edx + xorl 48(%esp), %edx xorl %eax, %ebp + xorl 8(%esp), %edx + xorl %ebx, %ebp xorl 28(%esp), %edx - xorl %ecx, %ebp .byte 209 .byte 194 + addl %ecx, %ebp movl %edx, 40(%esp) - leal 3395469782(%edx,%ebx,1),%edx - movl %edi, %ebx - roll $5, %ebx - addl %ebp, %edx - addl %ebx, %edx + movl %edi, %ecx + roll $5, %ecx + leal 3395469782(%edx,%ebp),%edx + addl %ecx, %edx - movl 44(%esp), %ebx movl %edi, %ebp - xorl 52(%esp), %ebx + movl 44(%esp), %ecx rorl $2, %edi - xorl 12(%esp), %ebx + xorl 52(%esp), %ecx xorl %esi, %ebp - xorl 32(%esp), %ebx + xorl 12(%esp), %ecx xorl %eax, %ebp + xorl 32(%esp), %ecx .byte 209 -.byte 195 - movl %ebx, 44(%esp) - leal 3395469782(%ebx,%ecx,1),%ebx - movl %edx, %ecx - roll $5, %ecx - addl %ebp, %ebx - addl %ecx, %ebx +.byte 193 + addl %ebx, %ebp + movl %ecx, 44(%esp) + movl %edx, %ebx + roll $5, %ebx + leal 3395469782(%ecx,%ebp),%ecx + addl %ebx, %ecx - movl 48(%esp), %ecx movl %edx, %ebp - xorl 56(%esp), %ecx + movl 48(%esp), %ebx rorl $2, %edx - xorl 16(%esp), %ecx + xorl 56(%esp), %ebx xorl %edi, %ebp - xorl 36(%esp), %ecx + xorl 16(%esp), %ebx xorl %esi, %ebp + xorl 36(%esp), %ebx .byte 209 -.byte 193 - movl %ecx, 48(%esp) - leal 3395469782(%ecx,%eax,1),%ecx - movl %ebx, %eax +.byte 195 + addl %eax, %ebp + movl %ebx, 48(%esp) + movl %ecx, %eax roll $5, %eax - addl %ebp, %ecx - addl %eax, %ecx + leal 3395469782(%ebx,%ebp),%ebx + addl %eax, %ebx + movl %ecx, %ebp movl 52(%esp), %eax - movl %ebx, %ebp + rorl $2, %ecx xorl 60(%esp), %eax - rorl $2, %ebx - xorl 20(%esp), %eax xorl %edx, %ebp - xorl 40(%esp), %eax + xorl 20(%esp), %eax xorl %edi, %ebp + xorl 40(%esp), %eax .byte 209 .byte 192 + addl %esi, %ebp movl %eax, 52(%esp) - leal 3395469782(%eax,%esi,1),%eax - movl %ecx, %esi + movl %ebx, %esi roll $5, %esi - addl %ebp, %eax + leal 3395469782(%eax,%ebp),%eax addl %esi, %eax + movl %ebx, %ebp movl 56(%esp), %esi - movl %ecx, %ebp + rorl $2, %ebx xorl (%esp), %esi - rorl $2, %ecx + xorl %ecx, %ebp xorl 24(%esp), %esi - xorl %ebx, %ebp - xorl 44(%esp), %esi xorl %edx, %ebp + xorl 44(%esp), %esi .byte 209 .byte 198 + addl %edi, %ebp movl %esi, 56(%esp) - leal 3395469782(%esi,%edi,1),%esi movl %eax, %edi roll $5, %edi - addl %ebp, %esi + leal 3395469782(%esi,%ebp),%esi addl %edi, %esi - movl 60(%esp), %edi movl %eax, %ebp - xorl 4(%esp), %edi + movl 60(%esp), %edi rorl $2, %eax + xorl 4(%esp), %edi + xorl %ebx, %ebp xorl 28(%esp), %edi xorl %ecx, %ebp xorl 48(%esp), %edi - xorl %ebx, %ebp .byte 209 .byte 199 + addl %edx, %ebp movl %edi, 60(%esp) - leal 3395469782(%edi,%edx,1),%edi movl %esi, %edx roll $5, %edx - addl %ebp, %edi + leal 3395469782(%edi,%ebp),%edi addl %edx, %edi - - - movl 128(%esp), %ebp - movl 12(%ebp), %edx - addl %ecx, %edx - movl 4(%ebp), %ecx - addl %esi, %ecx - movl %eax, %esi - movl (%ebp), %eax - movl %edx, 12(%ebp) - addl %edi, %eax - movl 16(%ebp), %edi - addl %ebx, %edi - movl 8(%ebp), %ebx - addl %esi, %ebx - movl %eax, (%ebp) - movl 132(%esp), %esi - movl %ebx, 8(%ebp) - addl $64, %esi - movl 68(%esp), %eax - movl %edi, 16(%ebp) - cmpl %eax, %esi - movl %ecx, 4(%ebp) - jl .L000start - addl $108, %esp + movl 84(%esp), %ebp + movl 88(%esp), %edx + addl (%ebp), %edi + addl 4(%ebp), %esi + addl 8(%ebp), %eax + addl 12(%ebp), %ebx + addl 
16(%ebp), %ecx + movl %edi, (%ebp) + addl $64, %edx + movl %esi, 4(%ebp) + cmpl 92(%esp), %edx + movl %eax, 8(%ebp) + movl %ecx, %edi + movl %ebx, 12(%ebp) + movl %edx, %esi + movl %ecx, 16(%ebp) + jb .L000loop + addl $64, %esp popl %edi + popl %esi popl %ebx popl %ebp - popl %esi ret -.L_sha1_block_asm_data_order_end: - .size sha1_block_asm_data_order,.L_sha1_block_asm_data_order_end-sha1_block_asm_data_order -.ident "desasm.pl" -.text - .align 16 -.globl sha1_block_asm_host_order - .type sha1_block_asm_host_order,@function -sha1_block_asm_host_order: - movl 12(%esp), %ecx - pushl %esi - sall $6, %ecx - movl 12(%esp), %esi - pushl %ebp - addl %esi, %ecx - pushl %ebx - movl 16(%esp), %ebp - pushl %edi - movl 12(%ebp), %edx - subl $108, %esp - movl 16(%ebp), %edi - movl 8(%ebp), %ebx - movl %ecx, 68(%esp) - - movl (%esi), %eax - movl 4(%esi), %ecx - movl %eax, (%esp) - movl %ecx, 4(%esp) - movl 8(%esi), %eax - movl 12(%esi), %ecx - movl %eax, 8(%esp) - movl %ecx, 12(%esp) - movl 16(%esi), %eax - movl 20(%esi), %ecx - movl %eax, 16(%esp) - movl %ecx, 20(%esp) - movl 24(%esi), %eax - movl 28(%esi), %ecx - movl %eax, 24(%esp) - movl %ecx, 28(%esp) - movl 32(%esi), %eax - movl 36(%esi), %ecx - movl %eax, 32(%esp) - movl %ecx, 36(%esp) - movl 40(%esi), %eax - movl 44(%esi), %ecx - movl %eax, 40(%esp) - movl %ecx, 44(%esp) - movl 48(%esi), %eax - movl 52(%esi), %ecx - movl %eax, 48(%esp) - movl %ecx, 52(%esp) - movl 56(%esi), %eax - movl 60(%esi), %ecx - movl %eax, 56(%esp) - movl %ecx, 60(%esp) - jmp .L001shortcut -.L_sha1_block_asm_host_order_end: - .size sha1_block_asm_host_order,.L_sha1_block_asm_host_order_end-sha1_block_asm_host_order -.ident "desasm.pl" +.L_sha1_block_data_order_end: +.size sha1_block_data_order,.L_sha1_block_data_order_end-sha1_block_data_order +.ident "sha1_block_data_order" Index: secure/lib/libcrypto/i386/sha512-sse2.s =================================================================== --- secure/lib/libcrypto/i386/sha512-sse2.s (revision 0) +++ secure/lib/libcrypto/i386/sha512-sse2.s (revision 0) @@ -0,0 +1,385 @@ + # $FreeBSD$ + + + + + + + .file "sha512-sse2.s" +.text +.globl sha512_block_sse2 +.type sha512_block_sse2,@function +.align 16 +sha512_block_sse2: + pushl %ebp + movl %esp, %ebp + pushl %ebx + pushl %esi + pushl %edi + movl 8(%ebp), %edx + movl 12(%ebp), %edi + call .L000pic_point +.L000pic_point: + popl %esi + leal .L001K512-.L000pic_point(%esi),%esi + subl $320, %esp + andl $-16, %esp + movdqu (%edx), %xmm0 + movdqu 16(%edx), %xmm1 + movdqu 32(%edx), %xmm2 + movdqu 48(%edx), %xmm3 +.align 8 +.L002_chunk_loop: + movdqa %xmm0, 256(%esp) + movdqa %xmm1, 272(%esp) + movdqa %xmm2, 288(%esp) + movdqa %xmm3, 304(%esp) + xorl %edx, %edx + movdq2q %xmm0, %mm0 + movdq2q %xmm2, %mm1 + movl (%edi,%edx,8), %eax + movl 4(%edi,%edx,8), %ebx +.byte 15 +.byte 200 +.byte 15 +.byte 203 + movl %ebx, (%esp,%edx,8) + movl %eax, 4(%esp,%edx,8) + movl %ebx, 128(%esp,%edx,8) + movl %eax, 132(%esp,%edx,8) +.align 8 +.L003_1st_loop: + movl 8(%edi,%edx,8), %eax + movl 12(%edi,%edx,8),%ebx +.byte 15 +.byte 200 +.byte 15 +.byte 203 + movl %ebx, 8(%esp,%edx,8) + movl %eax, 12(%esp,%edx,8) + movl %ebx, 136(%esp,%edx,8) + movl %eax, 140(%esp,%edx,8) +.L004_1st_looplet: + movq 296(%esp), %mm4 + movq 304(%esp), %mm5 + movq 312(%esp), %mm6 + movq %mm1, %mm2 + movq %mm1, %mm3 + psrlq $14, %mm2 + psllq $23, %mm3 + movq %mm2, %mm7 + pxor %mm3, %mm7 + psrlq $4, %mm2 + psllq $23, %mm3 + pxor %mm2, %mm7 + pxor %mm3, %mm7 + psrlq $23, %mm2 + psllq $4, %mm3 + pxor %mm2, %mm7 + pxor %mm3, %mm7 + 
movq %mm1, 296(%esp) + movq %mm4, 304(%esp) + movq %mm5, 312(%esp) + pxor %mm5, %mm4 + pand %mm1, %mm4 + pxor %mm5, %mm4 + paddq %mm4, %mm7 + movq 264(%esp), %mm2 + movq 272(%esp), %mm3 + movq 280(%esp), %mm1 + paddq %mm6, %mm7 + paddq (%esi,%edx,8), %mm7 + paddq (%esp,%edx,8), %mm7 + paddq %mm7, %mm1 + movq %mm0, %mm4 + movq %mm0, %mm5 + psrlq $28, %mm4 + psllq $25, %mm5 + movq %mm4, %mm6 + pxor %mm5, %mm6 + psrlq $6, %mm4 + psllq $5, %mm5 + pxor %mm4, %mm6 + pxor %mm5, %mm6 + psrlq $5, %mm4 + psllq $6, %mm5 + pxor %mm4, %mm6 + pxor %mm5, %mm6 + movq %mm0, 264(%esp) + movq %mm2, 272(%esp) + movq %mm3, 280(%esp) + movq %mm0, %mm4 + por %mm3, %mm0 + pand %mm3, %mm4 + pand %mm2, %mm0 + por %mm0, %mm4 + paddq %mm4, %mm6 + movq %mm7, %mm0 + paddq %mm6, %mm0 + incl %edx + cmpl $15, %edx + jl .L003_1st_loop + je .L004_1st_looplet + movl %edx, %ebx +.align 8 +.L005_2nd_loop: + andl $15, %edx + movdqu 8(%esp,%edx,8), %xmm0 + movdqa %xmm0, %xmm2 + movdqa %xmm0, %xmm3 + psrlq $1, %xmm2 + psllq $56, %xmm3 + movdqa %xmm2, %xmm0 + pxor %xmm3, %xmm0 + psrlq $6, %xmm2 + psllq $7, %xmm3 + pxor %xmm2, %xmm0 + pxor %xmm3, %xmm0 + psrlq $1, %xmm2 + pxor %xmm2, %xmm0 + movdqa 112(%esp,%edx,8),%xmm1 + movdqa %xmm1, %xmm4 + movdqa %xmm1, %xmm5 + psrlq $6, %xmm4 + psllq $3, %xmm5 + movdqa %xmm4, %xmm1 + pxor %xmm5, %xmm1 + psrlq $13, %xmm4 + psllq $42, %xmm5 + pxor %xmm4, %xmm1 + pxor %xmm5, %xmm1 + psrlq $42, %xmm4 + pxor %xmm4, %xmm1 + movdqu 72(%esp,%edx,8),%xmm6 + paddq %xmm1, %xmm0 + paddq %xmm6, %xmm0 + paddq (%esp,%edx,8), %xmm0 + movdqa %xmm0, (%esp,%edx,8) + movdqa %xmm0, 128(%esp,%edx,8) + movq 296(%esp), %mm4 + movq 304(%esp), %mm5 + movq 312(%esp), %mm6 + movq %mm1, %mm2 + movq %mm1, %mm3 + psrlq $14, %mm2 + psllq $23, %mm3 + movq %mm2, %mm7 + pxor %mm3, %mm7 + psrlq $4, %mm2 + psllq $23, %mm3 + pxor %mm2, %mm7 + pxor %mm3, %mm7 + psrlq $23, %mm2 + psllq $4, %mm3 + pxor %mm2, %mm7 + pxor %mm3, %mm7 + movq %mm1, 296(%esp) + movq %mm4, 304(%esp) + movq %mm5, 312(%esp) + pxor %mm5, %mm4 + pand %mm1, %mm4 + pxor %mm5, %mm4 + paddq %mm4, %mm7 + movq 264(%esp), %mm2 + movq 272(%esp), %mm3 + movq 280(%esp), %mm1 + paddq %mm6, %mm7 + paddq (%esi,%ebx,8), %mm7 + paddq (%esp,%edx,8), %mm7 + paddq %mm7, %mm1 + movq %mm0, %mm4 + movq %mm0, %mm5 + psrlq $28, %mm4 + psllq $25, %mm5 + movq %mm4, %mm6 + pxor %mm5, %mm6 + psrlq $6, %mm4 + psllq $5, %mm5 + pxor %mm4, %mm6 + pxor %mm5, %mm6 + psrlq $5, %mm4 + psllq $6, %mm5 + pxor %mm4, %mm6 + pxor %mm5, %mm6 + movq %mm0, 264(%esp) + movq %mm2, 272(%esp) + movq %mm3, 280(%esp) + movq %mm0, %mm4 + por %mm3, %mm0 + pand %mm3, %mm4 + pand %mm2, %mm0 + por %mm0, %mm4 + paddq %mm4, %mm6 + movq %mm7, %mm0 + paddq %mm6, %mm0 + incl %ebx + incl %edx + movq 296(%esp), %mm4 + movq 304(%esp), %mm5 + movq 312(%esp), %mm6 + movq %mm1, %mm2 + movq %mm1, %mm3 + psrlq $14, %mm2 + psllq $23, %mm3 + movq %mm2, %mm7 + pxor %mm3, %mm7 + psrlq $4, %mm2 + psllq $23, %mm3 + pxor %mm2, %mm7 + pxor %mm3, %mm7 + psrlq $23, %mm2 + psllq $4, %mm3 + pxor %mm2, %mm7 + pxor %mm3, %mm7 + movq %mm1, 296(%esp) + movq %mm4, 304(%esp) + movq %mm5, 312(%esp) + pxor %mm5, %mm4 + pand %mm1, %mm4 + pxor %mm5, %mm4 + paddq %mm4, %mm7 + movq 264(%esp), %mm2 + movq 272(%esp), %mm3 + movq 280(%esp), %mm1 + paddq %mm6, %mm7 + paddq (%esi,%ebx,8), %mm7 + paddq (%esp,%edx,8), %mm7 + paddq %mm7, %mm1 + movq %mm0, %mm4 + movq %mm0, %mm5 + psrlq $28, %mm4 + psllq $25, %mm5 + movq %mm4, %mm6 + pxor %mm5, %mm6 + psrlq $6, %mm4 + psllq $5, %mm5 + pxor %mm4, %mm6 + pxor %mm5, %mm6 + psrlq $5, %mm4 + psllq $6, %mm5 + pxor 
%mm4, %mm6 + pxor %mm5, %mm6 + movq %mm0, 264(%esp) + movq %mm2, 272(%esp) + movq %mm3, 280(%esp) + movq %mm0, %mm4 + por %mm3, %mm0 + pand %mm3, %mm4 + pand %mm2, %mm0 + por %mm0, %mm4 + paddq %mm4, %mm6 + movq %mm7, %mm0 + paddq %mm6, %mm0 + incl %ebx + incl %edx + cmpl $80, %ebx + jl .L005_2nd_loop + movl 8(%ebp), %edx + movq %mm0, 256(%esp) + movq %mm1, 288(%esp) + movdqu (%edx), %xmm0 + movdqu 16(%edx), %xmm1 + movdqu 32(%edx), %xmm2 + movdqu 48(%edx), %xmm3 + paddq 256(%esp), %xmm0 + paddq 272(%esp), %xmm1 + paddq 288(%esp), %xmm2 + paddq 304(%esp), %xmm3 + movdqu %xmm0, (%edx) + movdqu %xmm1, 16(%edx) + movdqu %xmm2, 32(%edx) + movdqu %xmm3, 48(%edx) + addl $128, %edi + decl 16(%ebp) + jnz .L002_chunk_loop + emms + movl -12(%ebp), %edi + movl -8(%ebp), %esi + movl -4(%ebp), %ebx + leave + ret +.align 64 +.L001K512: + .long 3609767458,1116352408 + .long 602891725,1899447441 + .long 3964484399,3049323471 + .long 2173295548,3921009573 + .long 4081628472,961987163 + .long 3053834265,1508970993 + .long 2937671579,2453635748 + .long 3664609560,2870763221 + .long 2734883394,3624381080 + .long 1164996542,310598401 + .long 1323610764,607225278 + .long 3590304994,1426881987 + .long 4068182383,1925078388 + .long 991336113,2162078206 + .long 633803317,2614888103 + .long 3479774868,3248222580 + .long 2666613458,3835390401 + .long 944711139,4022224774 + .long 2341262773,264347078 + .long 2007800933,604807628 + .long 1495990901,770255983 + .long 1856431235,1249150122 + .long 3175218132,1555081692 + .long 2198950837,1996064986 + .long 3999719339,2554220882 + .long 766784016,2821834349 + .long 2566594879,2952996808 + .long 3203337956,3210313671 + .long 1034457026,3336571891 + .long 2466948901,3584528711 + .long 3758326383,113926993 + .long 168717936,338241895 + .long 1188179964,666307205 + .long 1546045734,773529912 + .long 1522805485,1294757372 + .long 2643833823,1396182291 + .long 2343527390,1695183700 + .long 1014477480,1986661051 + .long 1206759142,2177026350 + .long 344077627,2456956037 + .long 1290863460,2730485921 + .long 3158454273,2820302411 + .long 3505952657,3259730800 + .long 106217008,3345764771 + .long 3606008344,3516065817 + .long 1432725776,3600352804 + .long 1467031594,4094571909 + .long 851169720,275423344 + .long 3100823752,430227734 + .long 1363258195,506948616 + .long 3750685593,659060556 + .long 3785050280,883997877 + .long 3318307427,958139571 + .long 3812723403,1322822218 + .long 2003034995,1537002063 + .long 3602036899,1747873779 + .long 1575990012,1955562222 + .long 1125592928,2024104815 + .long 2716904306,2227730452 + .long 442776044,2361852424 + .long 593698344,2428436474 + .long 3733110249,2756734187 + .long 2999351573,3204031479 + .long 3815920427,3329325298 + .long 3928383900,3391569614 + .long 566280711,3515267271 + .long 3454069534,3940187606 + .long 4000239992,4118630271 + .long 1914138554,116418474 + .long 2731055270,174292421 + .long 3203993006,289380356 + .long 320620315,460393269 + .long 587496836,685471733 + .long 1086792851,852142971 + .long 365543100,1017036298 + .long 2618297676,1126000580 + .long 3409855158,1288033470 + .long 4234509866,1501505948 + .long 987167468,1607167915 + .long 1246189591,1816402316 +.L_sha512_block_sse2_end: +.size sha512_block_sse2,.L_sha512_block_sse2_end-sha512_block_sse2 +.ident "sha512_block_sse2" Property changes on: secure/lib/libcrypto/i386/sha512-sse2.s ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: 
secure/lib/libcrypto/opensslconf-powerpc.h =================================================================== --- secure/lib/libcrypto/opensslconf-powerpc.h (revision 222101) +++ secure/lib/libcrypto/opensslconf-powerpc.h (working copy) @@ -5,35 +5,37 @@ /* OpenSSL was configured with the following options: */ #ifndef OPENSSL_DOING_MAKEDEPEND -/* Disabled by default in OpenSSL 0.9.8. */ + #ifndef OPENSSL_NO_CAMELLIA # define OPENSSL_NO_CAMELLIA #endif -/* Disabled by default in OpenSSL 0.9.8. */ +#ifndef OPENSSL_NO_CAPIENG +# define OPENSSL_NO_CAPIENG +#endif #ifndef OPENSSL_NO_CMS # define OPENSSL_NO_CMS #endif -/* Disabled by default in OpenSSL 0.9.8. */ -#ifndef OPENSSL_NO_SEED -# define OPENSSL_NO_SEED +#ifndef OPENSSL_NO_GMP +# define OPENSSL_NO_GMP #endif -/* jpake is marked experimental in OpenSSL 0.9.8. */ #ifndef OPENSSL_NO_JPAKE # define OPENSSL_NO_JPAKE #endif -/* libgmp is not in the FreeBSD base system. */ -#ifndef OPENSSL_NO_GMP -# define OPENSSL_NO_GMP -#endif -/* The Kerberos 5 support is MIT-specific. */ #ifndef OPENSSL_NO_KRB5 # define OPENSSL_NO_KRB5 #endif +#ifndef OPENSSL_NO_SEED +# define OPENSSL_NO_SEED +#endif #endif /* OPENSSL_DOING_MAKEDEPEND */ + #ifndef OPENSSL_THREADS # define OPENSSL_THREADS #endif +#ifndef OPENSSL_NO_ASM +# define OPENSSL_NO_ASM +#endif #ifndef OPENSSL_NO_STATIC_ENGINE # define OPENSSL_NO_STATIC_ENGINE #endif @@ -43,21 +45,46 @@ who haven't had the time to do the appropriate changes in their applications. */ #ifdef OPENSSL_ALGORITHM_DEFINES +# if defined(OPENSSL_NO_CAMELLIA) && !defined(NO_CAMELLIA) +# define NO_CAMELLIA +# endif +# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG) +# define NO_CAPIENG +# endif +# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS) +# define NO_CMS +# endif # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP) # define NO_GMP # endif +# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE) +# define NO_JPAKE +# endif # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5) # define NO_KRB5 # endif +# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED) +# define NO_SEED # endif -#ifdef OPENSSL_OTHER_DEFINES -# ifndef NO_ASM -# define NO_ASM -# endif #endif /* crypto/opensslconf.h.in */ +#ifdef OPENSSL_DOING_MAKEDEPEND + +/* Include any symbols here that have to be explicitly set to enable a feature + * that should be visible to makedepend. + * + * [Our "make depend" doesn't actually look at this, we use actual build settings + * instead; we want to make it easy to remove subdirectories with disabled algorithms.] + */ + +#ifndef OPENSSL_FIPS +#define OPENSSL_FIPS +#endif + +#endif + /* Generate 80386 code? */ #undef I386_ONLY @@ -102,9 +129,13 @@ * This enables code handling data aligned at natural CPU word * boundary. See crypto/rc4/rc4_enc.c for further details. 
*/ +#ifdef __powerpc64__ +#define RC4_CHUNK unsigned long +#else #undef RC4_CHUNK #endif #endif +#endif #if (defined(HEADER_NEW_DES_H) || defined(HEADER_DES_H)) && !defined(DES_LONG) /* If this is set to 'unsigned int' on a DEC Alpha, this gives about a @@ -144,8 +175,12 @@ #define CONFIG_HEADER_RC4_LOCL_H /* if this is defined data[i] is used instead of *data, this is a %20 * speedup on x86 */ +#ifdef __powerpc64__ +#undef RC4_INDEX +#else #define RC4_INDEX #endif +#endif #if defined(HEADER_BF_LOCL_H) && !defined(CONFIG_HEADER_BF_LOCL_H) #define CONFIG_HEADER_BF_LOCL_H @@ -215,7 +250,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D # define DES_PTR # define DES_RISC2 # define DES_UNROLL -#elif defined( i386 ) /* x86 boxes, should be gcc */ +#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */ # define DES_PTR # define DES_RISC1 # define DES_UNROLL Index: secure/lib/libcrypto/opensslconf-arm.h =================================================================== --- secure/lib/libcrypto/opensslconf-arm.h (revision 222101) +++ secure/lib/libcrypto/opensslconf-arm.h (working copy) @@ -1,39 +1,40 @@ -/* $FreeBSD$ */ /* opensslconf.h */ /* WARNING: Generated automatically from opensslconf.h.in by Configure. */ /* OpenSSL was configured with the following options: */ #ifndef OPENSSL_DOING_MAKEDEPEND -/* Disabled by default in OpenSSL 0.9.8. */ + #ifndef OPENSSL_NO_CAMELLIA # define OPENSSL_NO_CAMELLIA #endif -/* Disabled by default in OpenSSL 0.9.8. */ +#ifndef OPENSSL_NO_CAPIENG +# define OPENSSL_NO_CAPIENG +#endif #ifndef OPENSSL_NO_CMS # define OPENSSL_NO_CMS #endif -/* Disabled by default in OpenSSL 0.9.8. */ -#ifndef OPENSSL_NO_SEED -# define OPENSSL_NO_SEED -#endif -/* libgmp is not in the FreeBSD base system. */ #ifndef OPENSSL_NO_GMP # define OPENSSL_NO_GMP #endif -/* jpake is marked experimental in OpenSSL 0.9.8. */ #ifndef OPENSSL_NO_JPAKE # define OPENSSL_NO_JPAKE #endif -/* The Kerberos 5 support is MIT-specific. */ #ifndef OPENSSL_NO_KRB5 # define OPENSSL_NO_KRB5 #endif +#ifndef OPENSSL_NO_SEED +# define OPENSSL_NO_SEED +#endif #endif /* OPENSSL_DOING_MAKEDEPEND */ + #ifndef OPENSSL_THREADS # define OPENSSL_THREADS #endif +#ifndef OPENSSL_NO_ASM +# define OPENSSL_NO_ASM +#endif #ifndef OPENSSL_NO_STATIC_ENGINE # define OPENSSL_NO_STATIC_ENGINE #endif @@ -43,21 +44,46 @@ who haven't had the time to do the appropriate changes in their applications. */ #ifdef OPENSSL_ALGORITHM_DEFINES +# if defined(OPENSSL_NO_CAMELLIA) && !defined(NO_CAMELLIA) +# define NO_CAMELLIA +# endif +# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG) +# define NO_CAPIENG +# endif +# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS) +# define NO_CMS +# endif # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP) # define NO_GMP # endif +# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE) +# define NO_JPAKE +# endif # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5) # define NO_KRB5 # endif +# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED) +# define NO_SEED # endif -#ifdef OPENSSL_OTHER_DEFINES -# ifndef NO_ASM -# define NO_ASM -# endif #endif /* crypto/opensslconf.h.in */ +#ifdef OPENSSL_DOING_MAKEDEPEND + +/* Include any symbols here that have to be explicitly set to enable a feature + * that should be visible to makedepend. + * + * [Our "make depend" doesn't actually look at this, we use actual build settings + * instead; we want to make it easy to remove subdirectories with disabled algorithms.] 
+ */ + +#ifndef OPENSSL_FIPS +#define OPENSSL_FIPS +#endif + +#endif + /* Generate 80386 code? */ #undef I386_ONLY @@ -110,7 +136,7 @@ /* If this is set to 'unsigned int' on a DEC Alpha, this gives about a * %20 speed up (longs are 8 bytes, int's are 4). */ #ifndef DES_LONG -#define DES_LONG unsigned long +#define DES_LONG unsigned int #endif #endif @@ -149,14 +175,14 @@ /* the following is tweaked from a config script, that is why it is a * protected undef/define */ #ifndef DES_PTR -#define DES_PTR +#undef DES_PTR #endif /* This helps C compiler generate the correct code for multiple functional * units. It reduces register dependancies at the expense of 2 more * registers */ #ifndef DES_RISC1 -#define DES_RISC1 +#undef DES_RISC1 #endif #ifndef DES_RISC2 @@ -206,7 +232,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D # define DES_PTR # define DES_RISC2 # define DES_UNROLL -#elif defined( i386 ) /* x86 boxes, should be gcc */ +#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */ # define DES_PTR # define DES_RISC1 # define DES_UNROLL Index: secure/lib/libcrypto/Makefile.asm =================================================================== --- secure/lib/libcrypto/Makefile.asm (revision 222101) +++ secure/lib/libcrypto/Makefile.asm (working copy) @@ -1,23 +1,70 @@ # $FreeBSD$ -# Use this to help generate the asm *.s files after an import. It is not +# Use this to help generate the asm *.[Ss] files after an import. It is not # perfect by any means, but does what is needed. # Do a 'make -f Makefile.asm all' and it will generate *.s. Move them # to the i386 subdir, and correct any exposed paths and $ FreeBSD $ tags. -.if ${MACHINE_ARCH} == "i386" - .include "Makefile.inc" -.PATH: ${LCRYPTO_SRC}/crypto/rc4/asm ${LCRYPTO_SRC}/crypto/rc5/asm \ - ${LCRYPTO_SRC}/crypto/des/asm ${LCRYPTO_SRC}/crypto/cast/asm \ - ${LCRYPTO_SRC}/crypto/sha/asm ${LCRYPTO_SRC}/crypto/bn/asm \ - ${LCRYPTO_SRC}/crypto/bf/asm ${LCRYPTO_SRC}/crypto/md5/asm \ - ${LCRYPTO_SRC}/crypto/ripemd/asm +.if ${MACHINE_CPUARCH} == "amd64" +.PATH: ${LCRYPTO_SRC}/crypto ${LCRYPTO_SRC}/crypto/aes/asm \ + ${LCRYPTO_SRC}/crypto/bn/asm ${LCRYPTO_SRC}/crypto/md5/asm \ + ${LCRYPTO_SRC}/crypto/rc4/asm ${LCRYPTO_SRC}/crypto/rc5/asm \ + ${LCRYPTO_SRC}/crypto/sha/asm + +# aes +SRCS= aes-x86_64.pl + +# bn +SRCS+= x86_64-mont.pl + +# md5 +SRCS+= md5-x86_64.pl + +# rc4 +SRCS+= rc4-x86_64.pl + +# sha +SRCS+= sha1-x86_64.pl sha512-x86_64.pl + +ASM= ${SRCS:S/.pl/.S/} +ASM+= sha256-x86_64.S x86_64cpuid.S + +all: ${ASM} + +CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.cmt/} ${SRCS:M*.pl:S/.pl$/.S/} +CLEANFILES+= sha256-x86_64.cmt sha256-x86_64.S x86_64cpuid.cmt x86_64cpuid.S +.SUFFIXES: .pl .cmt + +.pl.cmt: + ( cd `dirname ${.IMPSRC}`/.. ; perl ${.IMPSRC} ${.OBJDIR}/${.TARGET} ) + +.cmt.S: + ( echo ' # $$'FreeBSD'$$'; cat ${.IMPSRC} ) > ${.TARGET} + +sha256-x86_64.cmt: sha512-x86_64.pl + ( cd `dirname ${.ALLSRC}`/.. 
; perl ${.ALLSRC} ${.OBJDIR}/${.TARGET} ) + +x86_64cpuid.cmt: x86_64cpuid.pl + ( cd `dirname ${.ALLSRC}` ; perl ${.ALLSRC} ${.OBJDIR}/${.TARGET} ) + +.elif ${MACHINE_CPUARCH} == "i386" + +.PATH: ${LCRYPTO_SRC}/crypto ${LCRYPTO_SRC}/crypto/aes/asm \ + ${LCRYPTO_SRC}/crypto/bf/asm ${LCRYPTO_SRC}/crypto/bn/asm \ + ${LCRYPTO_SRC}/crypto/cast/asm ${LCRYPTO_SRC}/crypto/des/asm \ + ${LCRYPTO_SRC}/crypto/md5/asm ${LCRYPTO_SRC}/crypto/rc4/asm \ + ${LCRYPTO_SRC}/crypto/rc5/asm ${LCRYPTO_SRC}/crypto/ripemd/asm \ + ${LCRYPTO_SRC}/crypto/sha/asm + PERLPATH= -I${LCRYPTO_SRC}/crypto/des/asm -I${LCRYPTO_SRC}/crypto/perlasm +# aes +SRCS= aes-586.pl + # blowfish -SRCS= bf-686.pl bf-586.pl +SRCS+= bf-686.pl bf-586.pl # bn SRCS+= bn-586.pl co-586.pl @@ -41,21 +88,21 @@ SRCS+= rc5-586.pl SRCS+= rmd-586.pl # sha -SRCS+= sha1-586.pl +SRCS+= sha1-586.pl sha512-sse2.pl +# cpuid +SRCS+= x86cpuid.pl + ASM= ${SRCS:S/.pl/.s/} all: ${ASM} -CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.cmt/} ${SRCS:M*.pl:S/.pl$/.s/} -.SUFFIXES: .pl .cmt +CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.s/} +.SUFFIXES: .pl -.pl.cmt: +.pl.s: ( echo ' # $$'FreeBSD'$$' ;\ - perl ${PERLPATH} ${.IMPSRC} elf ${CPUTYPE:Mi386:S/i//} ) > ${.TARGET} + perl ${PERLPATH} ${.IMPSRC} elf ${CFLAGS} ) > ${.TARGET} +.endif -.cmt.s: - tr -d "'" < ${.IMPSRC} > ${.TARGET} - .include -.endif Index: secure/lib/libcrypto/opensslconf-sparc64.h =================================================================== --- secure/lib/libcrypto/opensslconf-sparc64.h (revision 222101) +++ secure/lib/libcrypto/opensslconf-sparc64.h (working copy) @@ -5,35 +5,37 @@ /* OpenSSL was configured with the following options: */ #ifndef OPENSSL_DOING_MAKEDEPEND -/* Disabled by default in OpenSSL 0.9.8. */ + #ifndef OPENSSL_NO_CAMELLIA # define OPENSSL_NO_CAMELLIA #endif -/* Disabled by default in OpenSSL 0.9.8. */ +#ifndef OPENSSL_NO_CAPIENG +# define OPENSSL_NO_CAPIENG +#endif #ifndef OPENSSL_NO_CMS # define OPENSSL_NO_CMS #endif -/* Disabled by default in OpenSSL 0.9.8. */ -#ifndef OPENSSL_NO_SEED -# define OPENSSL_NO_SEED +#ifndef OPENSSL_NO_GMP +# define OPENSSL_NO_GMP #endif -/* jpake is marked experimental in OpenSSL 0.9.8. */ #ifndef OPENSSL_NO_JPAKE # define OPENSSL_NO_JPAKE #endif -/* libgmp is not in the FreeBSD base system. */ -#ifndef OPENSSL_NO_GMP -# define OPENSSL_NO_GMP -#endif -/* The Kerberos 5 support is MIT-specific. */ #ifndef OPENSSL_NO_KRB5 # define OPENSSL_NO_KRB5 #endif +#ifndef OPENSSL_NO_SEED +# define OPENSSL_NO_SEED +#endif #endif /* OPENSSL_DOING_MAKEDEPEND */ + #ifndef OPENSSL_THREADS # define OPENSSL_THREADS #endif +#ifndef OPENSSL_NO_ASM +# define OPENSSL_NO_ASM +#endif #ifndef OPENSSL_NO_STATIC_ENGINE # define OPENSSL_NO_STATIC_ENGINE #endif @@ -43,21 +45,46 @@ who haven't had the time to do the appropriate changes in their applications. 
*/ #ifdef OPENSSL_ALGORITHM_DEFINES +# if defined(OPENSSL_NO_CAMELLIA) && !defined(NO_CAMELLIA) +# define NO_CAMELLIA +# endif +# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG) +# define NO_CAPIENG +# endif +# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS) +# define NO_CMS +# endif # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP) # define NO_GMP # endif +# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE) +# define NO_JPAKE +# endif # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5) # define NO_KRB5 # endif +# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED) +# define NO_SEED # endif -#ifdef OPENSSL_OTHER_DEFINES -# ifndef NO_ASM -# define NO_ASM -# endif #endif /* crypto/opensslconf.h.in */ +#ifdef OPENSSL_DOING_MAKEDEPEND + +/* Include any symbols here that have to be explicitly set to enable a feature + * that should be visible to makedepend. + * + * [Our "make depend" doesn't actually look at this, we use actual build settings + * instead; we want to make it easy to remove subdirectories with disabled algorithms.] + */ + +#ifndef OPENSSL_FIPS +#define OPENSSL_FIPS +#endif + +#endif + /* Generate 80386 code? */ #undef I386_ONLY @@ -102,7 +129,7 @@ * This enables code handling data aligned at natural CPU word * boundary. See crypto/rc4/rc4_enc.c for further details. */ -#undef RC4_CHUNK +#define RC4_CHUNK unsigned long #endif #endif @@ -140,7 +167,7 @@ #if defined(HEADER_BF_LOCL_H) && !defined(CONFIG_HEADER_BF_LOCL_H) #define CONFIG_HEADER_BF_LOCL_H -#undef BF_PTR +#define BF_PTR #endif /* HEADER_BF_LOCL_H */ #if defined(HEADER_DES_LOCL_H) && !defined(CONFIG_HEADER_DES_LOCL_H) @@ -206,7 +233,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D # define DES_PTR # define DES_RISC2 # define DES_UNROLL -#elif defined( i386 ) /* x86 boxes, should be gcc */ +#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */ # define DES_PTR # define DES_RISC1 # define DES_UNROLL Index: secure/lib/libcrypto/opensslconf-i386.h =================================================================== --- secure/lib/libcrypto/opensslconf-i386.h (revision 222101) +++ secure/lib/libcrypto/opensslconf-i386.h (working copy) @@ -5,28 +5,28 @@ /* OpenSSL was configured with the following options: */ #ifndef OPENSSL_DOING_MAKEDEPEND -/* Disabled by default in OpenSSL 0.9.8. */ + +#ifndef OPENSSL_NO_CAPIENG +# define OPENSSL_NO_CAPIENG +#endif #ifndef OPENSSL_NO_CMS # define OPENSSL_NO_CMS #endif -/* Disabled by default in OpenSSL 0.9.8. */ -#ifndef OPENSSL_NO_SEED -# define OPENSSL_NO_SEED -#endif -/* libgmp is not in the FreeBSD base system. */ #ifndef OPENSSL_NO_GMP # define OPENSSL_NO_GMP #endif -/* jpake is marked experimental in OpenSSL 0.9.8. */ #ifndef OPENSSL_NO_JPAKE # define OPENSSL_NO_JPAKE #endif -/* The Kerberos 5 support is MIT-specific. */ #ifndef OPENSSL_NO_KRB5 # define OPENSSL_NO_KRB5 #endif +#ifndef OPENSSL_NO_SEED +# define OPENSSL_NO_SEED +#endif #endif /* OPENSSL_DOING_MAKEDEPEND */ + #ifndef OPENSSL_THREADS # define OPENSSL_THREADS #endif @@ -39,21 +39,45 @@ who haven't had the time to do the appropriate changes in their applications. 
*/ #ifdef OPENSSL_ALGORITHM_DEFINES +# if defined(OPENSSL_NO_CAPIENG) && !defined(NO_CAPIENG) +# define NO_CAPIENG +# endif +# if defined(OPENSSL_NO_CMS) && !defined(NO_CMS) +# define NO_CMS +# endif # if defined(OPENSSL_NO_GMP) && !defined(NO_GMP) # define NO_GMP # endif +# if defined(OPENSSL_NO_JPAKE) && !defined(NO_JPAKE) +# define NO_JPAKE +# endif # if defined(OPENSSL_NO_KRB5) && !defined(NO_KRB5) # define NO_KRB5 # endif +# if defined(OPENSSL_NO_SEED) && !defined(NO_SEED) +# define NO_SEED # endif -#ifdef OPENSSL_OTHER_DEFINES -# ifndef NO_ASM -# define NO_ASM -# endif #endif +#define OPENSSL_CPUID_OBJ + /* crypto/opensslconf.h.in */ +#ifdef OPENSSL_DOING_MAKEDEPEND + +/* Include any symbols here that have to be explicitly set to enable a feature + * that should be visible to makedepend. + * + * [Our "make depend" doesn't actually look at this, we use actual build settings + * instead; we want to make it easy to remove subdirectories with disabled algorithms.] + */ + +#ifndef OPENSSL_FIPS +#define OPENSSL_FIPS +#endif + +#endif + /* Generate 80386 code? */ #undef I386_ONLY @@ -202,7 +226,7 @@ YOU SHOULD NOT HAVE BOTH DES_RISC1 AND DES_RISC2 D # define DES_PTR # define DES_RISC2 # define DES_UNROLL -#elif defined( i386 ) /* x86 boxes, should be gcc */ +#elif defined(i386) || defined(__i386__) /* x86 boxes, should be gcc */ # define DES_PTR # define DES_RISC1 # define DES_UNROLL Index: secure/lib/libcrypto/amd64/sha512-x86_64.S =================================================================== --- secure/lib/libcrypto/amd64/sha512-x86_64.S (revision 0) +++ secure/lib/libcrypto/amd64/sha512-x86_64.S (revision 0) @@ -0,0 +1,1994 @@ + # $FreeBSD$ +.text + +.globl sha512_block_data_order +.type sha512_block_data_order,@function +.align 16 +sha512_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rsp,%rbp + shlq $4,%rdx + subq $128+32,%rsp + leaq (%rsi,%rdx,8),%rdx + andq $-64,%rsp + movq %rdi,128+0(%rsp) + movq %rsi,128+8(%rsp) + movq %rdx,128+16(%rsp) + movq %rbp,128+24(%rsp) + + .long 0x12d8d48,0x90000000 + leaq K512-.(%rbp),%rbp + + movq 0(%rdi),%rax + movq 8(%rdi),%rbx + movq 16(%rdi),%rcx + movq 24(%rdi),%rdx + movq 32(%rdi),%r8 + movq 40(%rdi),%r9 + movq 48(%rdi),%r10 + movq 56(%rdi),%r11 + jmp .Lloop + +.align 16 +.Lloop: + xorq %rdi,%rdi + movq 0(%rsi),%r12 + bswapq %r12 + movq %r8,%r13 + movq %r8,%r14 + movq %r9,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r10,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r8,%r15 + movq %r12,0(%rsp) + + xorq %r14,%r13 + xorq %r10,%r15 + addq %r11,%r12 + + movq %rax,%r11 + addq %r13,%r12 + + addq %r15,%r12 + movq %rax,%r13 + movq %rax,%r14 + + rorq $28,%r11 + rorq $34,%r13 + movq %rax,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r11 + rorq $5,%r13 + orq %rcx,%r14 + + xorq %r13,%r11 + andq %rcx,%r15 + addq %r12,%rdx + + andq %rbx,%r14 + addq %r12,%r11 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r11 + movq 8(%rsi),%r12 + bswapq %r12 + movq %rdx,%r13 + movq %rdx,%r14 + movq %r8,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r9,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rdx,%r15 + movq %r12,8(%rsp) + + xorq %r14,%r13 + xorq %r9,%r15 + addq %r10,%r12 + + movq %r11,%r10 + addq %r13,%r12 + + addq %r15,%r12 + movq %r11,%r13 + movq %r11,%r14 + + rorq $28,%r10 + rorq $34,%r13 + movq %r11,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r10 + rorq $5,%r13 + orq %rbx,%r14 + + xorq %r13,%r10 + andq %rbx,%r15 + addq %r12,%rcx + + andq %rax,%r14 + addq %r12,%r10 + + orq %r15,%r14 + leaq 
1(%rdi),%rdi + + addq %r14,%r10 + movq 16(%rsi),%r12 + bswapq %r12 + movq %rcx,%r13 + movq %rcx,%r14 + movq %rdx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r8,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rcx,%r15 + movq %r12,16(%rsp) + + xorq %r14,%r13 + xorq %r8,%r15 + addq %r9,%r12 + + movq %r10,%r9 + addq %r13,%r12 + + addq %r15,%r12 + movq %r10,%r13 + movq %r10,%r14 + + rorq $28,%r9 + rorq $34,%r13 + movq %r10,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r9 + rorq $5,%r13 + orq %rax,%r14 + + xorq %r13,%r9 + andq %rax,%r15 + addq %r12,%rbx + + andq %r11,%r14 + addq %r12,%r9 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r9 + movq 24(%rsi),%r12 + bswapq %r12 + movq %rbx,%r13 + movq %rbx,%r14 + movq %rcx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rdx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rbx,%r15 + movq %r12,24(%rsp) + + xorq %r14,%r13 + xorq %rdx,%r15 + addq %r8,%r12 + + movq %r9,%r8 + addq %r13,%r12 + + addq %r15,%r12 + movq %r9,%r13 + movq %r9,%r14 + + rorq $28,%r8 + rorq $34,%r13 + movq %r9,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r8 + rorq $5,%r13 + orq %r11,%r14 + + xorq %r13,%r8 + andq %r11,%r15 + addq %r12,%rax + + andq %r10,%r14 + addq %r12,%r8 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r8 + movq 32(%rsi),%r12 + bswapq %r12 + movq %rax,%r13 + movq %rax,%r14 + movq %rbx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rcx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rax,%r15 + movq %r12,32(%rsp) + + xorq %r14,%r13 + xorq %rcx,%r15 + addq %rdx,%r12 + + movq %r8,%rdx + addq %r13,%r12 + + addq %r15,%r12 + movq %r8,%r13 + movq %r8,%r14 + + rorq $28,%rdx + rorq $34,%r13 + movq %r8,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rdx + rorq $5,%r13 + orq %r10,%r14 + + xorq %r13,%rdx + andq %r10,%r15 + addq %r12,%r11 + + andq %r9,%r14 + addq %r12,%rdx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rdx + movq 40(%rsi),%r12 + bswapq %r12 + movq %r11,%r13 + movq %r11,%r14 + movq %rax,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rbx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r11,%r15 + movq %r12,40(%rsp) + + xorq %r14,%r13 + xorq %rbx,%r15 + addq %rcx,%r12 + + movq %rdx,%rcx + addq %r13,%r12 + + addq %r15,%r12 + movq %rdx,%r13 + movq %rdx,%r14 + + rorq $28,%rcx + rorq $34,%r13 + movq %rdx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rcx + rorq $5,%r13 + orq %r9,%r14 + + xorq %r13,%rcx + andq %r9,%r15 + addq %r12,%r10 + + andq %r8,%r14 + addq %r12,%rcx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rcx + movq 48(%rsi),%r12 + bswapq %r12 + movq %r10,%r13 + movq %r10,%r14 + movq %r11,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rax,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r10,%r15 + movq %r12,48(%rsp) + + xorq %r14,%r13 + xorq %rax,%r15 + addq %rbx,%r12 + + movq %rcx,%rbx + addq %r13,%r12 + + addq %r15,%r12 + movq %rcx,%r13 + movq %rcx,%r14 + + rorq $28,%rbx + rorq $34,%r13 + movq %rcx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rbx + rorq $5,%r13 + orq %r8,%r14 + + xorq %r13,%rbx + andq %r8,%r15 + addq %r12,%r9 + + andq %rdx,%r14 + addq %r12,%rbx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rbx + movq 56(%rsi),%r12 + bswapq %r12 + movq %r9,%r13 + movq %r9,%r14 + movq %r10,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r11,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r9,%r15 + movq %r12,56(%rsp) + + xorq %r14,%r13 + xorq %r11,%r15 + addq %rax,%r12 + + movq %rbx,%rax + addq %r13,%r12 + + addq %r15,%r12 + movq %rbx,%r13 + movq %rbx,%r14 + + rorq $28,%rax + rorq $34,%r13 + movq %rbx,%r15 + addq (%rbp,%rdi,8),%r12 + 
+ xorq %r13,%rax + rorq $5,%r13 + orq %rdx,%r14 + + xorq %r13,%rax + andq %rdx,%r15 + addq %r12,%r8 + + andq %rcx,%r14 + addq %r12,%rax + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rax + movq 64(%rsi),%r12 + bswapq %r12 + movq %r8,%r13 + movq %r8,%r14 + movq %r9,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r10,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r8,%r15 + movq %r12,64(%rsp) + + xorq %r14,%r13 + xorq %r10,%r15 + addq %r11,%r12 + + movq %rax,%r11 + addq %r13,%r12 + + addq %r15,%r12 + movq %rax,%r13 + movq %rax,%r14 + + rorq $28,%r11 + rorq $34,%r13 + movq %rax,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r11 + rorq $5,%r13 + orq %rcx,%r14 + + xorq %r13,%r11 + andq %rcx,%r15 + addq %r12,%rdx + + andq %rbx,%r14 + addq %r12,%r11 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r11 + movq 72(%rsi),%r12 + bswapq %r12 + movq %rdx,%r13 + movq %rdx,%r14 + movq %r8,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r9,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rdx,%r15 + movq %r12,72(%rsp) + + xorq %r14,%r13 + xorq %r9,%r15 + addq %r10,%r12 + + movq %r11,%r10 + addq %r13,%r12 + + addq %r15,%r12 + movq %r11,%r13 + movq %r11,%r14 + + rorq $28,%r10 + rorq $34,%r13 + movq %r11,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r10 + rorq $5,%r13 + orq %rbx,%r14 + + xorq %r13,%r10 + andq %rbx,%r15 + addq %r12,%rcx + + andq %rax,%r14 + addq %r12,%r10 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r10 + movq 80(%rsi),%r12 + bswapq %r12 + movq %rcx,%r13 + movq %rcx,%r14 + movq %rdx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r8,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rcx,%r15 + movq %r12,80(%rsp) + + xorq %r14,%r13 + xorq %r8,%r15 + addq %r9,%r12 + + movq %r10,%r9 + addq %r13,%r12 + + addq %r15,%r12 + movq %r10,%r13 + movq %r10,%r14 + + rorq $28,%r9 + rorq $34,%r13 + movq %r10,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r9 + rorq $5,%r13 + orq %rax,%r14 + + xorq %r13,%r9 + andq %rax,%r15 + addq %r12,%rbx + + andq %r11,%r14 + addq %r12,%r9 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r9 + movq 88(%rsi),%r12 + bswapq %r12 + movq %rbx,%r13 + movq %rbx,%r14 + movq %rcx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rdx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rbx,%r15 + movq %r12,88(%rsp) + + xorq %r14,%r13 + xorq %rdx,%r15 + addq %r8,%r12 + + movq %r9,%r8 + addq %r13,%r12 + + addq %r15,%r12 + movq %r9,%r13 + movq %r9,%r14 + + rorq $28,%r8 + rorq $34,%r13 + movq %r9,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r8 + rorq $5,%r13 + orq %r11,%r14 + + xorq %r13,%r8 + andq %r11,%r15 + addq %r12,%rax + + andq %r10,%r14 + addq %r12,%r8 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r8 + movq 96(%rsi),%r12 + bswapq %r12 + movq %rax,%r13 + movq %rax,%r14 + movq %rbx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rcx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rax,%r15 + movq %r12,96(%rsp) + + xorq %r14,%r13 + xorq %rcx,%r15 + addq %rdx,%r12 + + movq %r8,%rdx + addq %r13,%r12 + + addq %r15,%r12 + movq %r8,%r13 + movq %r8,%r14 + + rorq $28,%rdx + rorq $34,%r13 + movq %r8,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rdx + rorq $5,%r13 + orq %r10,%r14 + + xorq %r13,%rdx + andq %r10,%r15 + addq %r12,%r11 + + andq %r9,%r14 + addq %r12,%rdx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rdx + movq 104(%rsi),%r12 + bswapq %r12 + movq %r11,%r13 + movq %r11,%r14 + movq %rax,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rbx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r11,%r15 + movq %r12,104(%rsp) + + xorq %r14,%r13 + xorq %rbx,%r15 + addq %rcx,%r12 + + 
movq %rdx,%rcx + addq %r13,%r12 + + addq %r15,%r12 + movq %rdx,%r13 + movq %rdx,%r14 + + rorq $28,%rcx + rorq $34,%r13 + movq %rdx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rcx + rorq $5,%r13 + orq %r9,%r14 + + xorq %r13,%rcx + andq %r9,%r15 + addq %r12,%r10 + + andq %r8,%r14 + addq %r12,%rcx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rcx + movq 112(%rsi),%r12 + bswapq %r12 + movq %r10,%r13 + movq %r10,%r14 + movq %r11,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rax,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r10,%r15 + movq %r12,112(%rsp) + + xorq %r14,%r13 + xorq %rax,%r15 + addq %rbx,%r12 + + movq %rcx,%rbx + addq %r13,%r12 + + addq %r15,%r12 + movq %rcx,%r13 + movq %rcx,%r14 + + rorq $28,%rbx + rorq $34,%r13 + movq %rcx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rbx + rorq $5,%r13 + orq %r8,%r14 + + xorq %r13,%rbx + andq %r8,%r15 + addq %r12,%r9 + + andq %rdx,%r14 + addq %r12,%rbx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rbx + movq 120(%rsi),%r12 + bswapq %r12 + movq %r9,%r13 + movq %r9,%r14 + movq %r10,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r11,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r9,%r15 + movq %r12,120(%rsp) + + xorq %r14,%r13 + xorq %r11,%r15 + addq %rax,%r12 + + movq %rbx,%rax + addq %r13,%r12 + + addq %r15,%r12 + movq %rbx,%r13 + movq %rbx,%r14 + + rorq $28,%rax + rorq $34,%r13 + movq %rbx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rax + rorq $5,%r13 + orq %rdx,%r14 + + xorq %r13,%rax + andq %rdx,%r15 + addq %r12,%r8 + + andq %rcx,%r14 + addq %r12,%rax + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rax + jmp .Lrounds_16_xx +.align 16 +.Lrounds_16_xx: + movq 8(%rsp),%r13 + movq 112(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 72(%rsp),%r12 + + addq 0(%rsp),%r12 + movq %r8,%r13 + movq %r8,%r14 + movq %r9,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r10,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r8,%r15 + movq %r12,0(%rsp) + + xorq %r14,%r13 + xorq %r10,%r15 + addq %r11,%r12 + + movq %rax,%r11 + addq %r13,%r12 + + addq %r15,%r12 + movq %rax,%r13 + movq %rax,%r14 + + rorq $28,%r11 + rorq $34,%r13 + movq %rax,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r11 + rorq $5,%r13 + orq %rcx,%r14 + + xorq %r13,%r11 + andq %rcx,%r15 + addq %r12,%rdx + + andq %rbx,%r14 + addq %r12,%r11 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r11 + movq 16(%rsp),%r13 + movq 120(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 80(%rsp),%r12 + + addq 8(%rsp),%r12 + movq %rdx,%r13 + movq %rdx,%r14 + movq %r8,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r9,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rdx,%r15 + movq %r12,8(%rsp) + + xorq %r14,%r13 + xorq %r9,%r15 + addq %r10,%r12 + + movq %r11,%r10 + addq %r13,%r12 + + addq %r15,%r12 + movq %r11,%r13 + movq %r11,%r14 + + rorq $28,%r10 + rorq $34,%r13 + movq %r11,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r10 + rorq $5,%r13 + orq %rbx,%r14 + + xorq %r13,%r10 + andq %rbx,%r15 + addq %r12,%rcx + + andq %rax,%r14 + addq %r12,%r10 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r10 + movq 24(%rsp),%r13 + movq 0(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq 
%r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 88(%rsp),%r12 + + addq 16(%rsp),%r12 + movq %rcx,%r13 + movq %rcx,%r14 + movq %rdx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r8,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rcx,%r15 + movq %r12,16(%rsp) + + xorq %r14,%r13 + xorq %r8,%r15 + addq %r9,%r12 + + movq %r10,%r9 + addq %r13,%r12 + + addq %r15,%r12 + movq %r10,%r13 + movq %r10,%r14 + + rorq $28,%r9 + rorq $34,%r13 + movq %r10,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r9 + rorq $5,%r13 + orq %rax,%r14 + + xorq %r13,%r9 + andq %rax,%r15 + addq %r12,%rbx + + andq %r11,%r14 + addq %r12,%r9 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r9 + movq 32(%rsp),%r13 + movq 8(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 96(%rsp),%r12 + + addq 24(%rsp),%r12 + movq %rbx,%r13 + movq %rbx,%r14 + movq %rcx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rdx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rbx,%r15 + movq %r12,24(%rsp) + + xorq %r14,%r13 + xorq %rdx,%r15 + addq %r8,%r12 + + movq %r9,%r8 + addq %r13,%r12 + + addq %r15,%r12 + movq %r9,%r13 + movq %r9,%r14 + + rorq $28,%r8 + rorq $34,%r13 + movq %r9,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r8 + rorq $5,%r13 + orq %r11,%r14 + + xorq %r13,%r8 + andq %r11,%r15 + addq %r12,%rax + + andq %r10,%r14 + addq %r12,%r8 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r8 + movq 40(%rsp),%r13 + movq 16(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 104(%rsp),%r12 + + addq 32(%rsp),%r12 + movq %rax,%r13 + movq %rax,%r14 + movq %rbx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rcx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rax,%r15 + movq %r12,32(%rsp) + + xorq %r14,%r13 + xorq %rcx,%r15 + addq %rdx,%r12 + + movq %r8,%rdx + addq %r13,%r12 + + addq %r15,%r12 + movq %r8,%r13 + movq %r8,%r14 + + rorq $28,%rdx + rorq $34,%r13 + movq %r8,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rdx + rorq $5,%r13 + orq %r10,%r14 + + xorq %r13,%rdx + andq %r10,%r15 + addq %r12,%r11 + + andq %r9,%r14 + addq %r12,%rdx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rdx + movq 48(%rsp),%r13 + movq 24(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 112(%rsp),%r12 + + addq 40(%rsp),%r12 + movq %r11,%r13 + movq %r11,%r14 + movq %rax,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rbx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r11,%r15 + movq %r12,40(%rsp) + + xorq %r14,%r13 + xorq %rbx,%r15 + addq %rcx,%r12 + + movq %rdx,%rcx + addq %r13,%r12 + + addq %r15,%r12 + movq %rdx,%r13 + movq %rdx,%r14 + + rorq $28,%rcx + rorq $34,%r13 + movq %rdx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rcx + rorq $5,%r13 + orq %r9,%r14 + + xorq %r13,%rcx + andq %r9,%r15 + addq %r12,%r10 + + andq %r8,%r14 + addq %r12,%rcx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rcx + movq 56(%rsp),%r13 + movq 32(%rsp),%r12 + + movq %r13,%r15 + 
+ shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 120(%rsp),%r12 + + addq 48(%rsp),%r12 + movq %r10,%r13 + movq %r10,%r14 + movq %r11,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rax,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r10,%r15 + movq %r12,48(%rsp) + + xorq %r14,%r13 + xorq %rax,%r15 + addq %rbx,%r12 + + movq %rcx,%rbx + addq %r13,%r12 + + addq %r15,%r12 + movq %rcx,%r13 + movq %rcx,%r14 + + rorq $28,%rbx + rorq $34,%r13 + movq %rcx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rbx + rorq $5,%r13 + orq %r8,%r14 + + xorq %r13,%rbx + andq %r8,%r15 + addq %r12,%r9 + + andq %rdx,%r14 + addq %r12,%rbx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rbx + movq 64(%rsp),%r13 + movq 40(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 0(%rsp),%r12 + + addq 56(%rsp),%r12 + movq %r9,%r13 + movq %r9,%r14 + movq %r10,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r11,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r9,%r15 + movq %r12,56(%rsp) + + xorq %r14,%r13 + xorq %r11,%r15 + addq %rax,%r12 + + movq %rbx,%rax + addq %r13,%r12 + + addq %r15,%r12 + movq %rbx,%r13 + movq %rbx,%r14 + + rorq $28,%rax + rorq $34,%r13 + movq %rbx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rax + rorq $5,%r13 + orq %rdx,%r14 + + xorq %r13,%rax + andq %rdx,%r15 + addq %r12,%r8 + + andq %rcx,%r14 + addq %r12,%rax + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rax + movq 72(%rsp),%r13 + movq 48(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 8(%rsp),%r12 + + addq 64(%rsp),%r12 + movq %r8,%r13 + movq %r8,%r14 + movq %r9,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r10,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r8,%r15 + movq %r12,64(%rsp) + + xorq %r14,%r13 + xorq %r10,%r15 + addq %r11,%r12 + + movq %rax,%r11 + addq %r13,%r12 + + addq %r15,%r12 + movq %rax,%r13 + movq %rax,%r14 + + rorq $28,%r11 + rorq $34,%r13 + movq %rax,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r11 + rorq $5,%r13 + orq %rcx,%r14 + + xorq %r13,%r11 + andq %rcx,%r15 + addq %r12,%rdx + + andq %rbx,%r14 + addq %r12,%r11 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r11 + movq 80(%rsp),%r13 + movq 56(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 16(%rsp),%r12 + + addq 72(%rsp),%r12 + movq %rdx,%r13 + movq %rdx,%r14 + movq %r8,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r9,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rdx,%r15 + movq %r12,72(%rsp) + + xorq %r14,%r13 + xorq %r9,%r15 + addq %r10,%r12 + + movq %r11,%r10 + addq %r13,%r12 + + addq %r15,%r12 + movq %r11,%r13 + movq %r11,%r14 + + rorq $28,%r10 + rorq $34,%r13 + movq %r11,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r10 + rorq $5,%r13 + orq %rbx,%r14 + + xorq %r13,%r10 + andq %rbx,%r15 + addq %r12,%rcx + + andq %rax,%r14 + addq %r12,%r10 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r10 + movq 
88(%rsp),%r13 + movq 64(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 24(%rsp),%r12 + + addq 80(%rsp),%r12 + movq %rcx,%r13 + movq %rcx,%r14 + movq %rdx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r8,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rcx,%r15 + movq %r12,80(%rsp) + + xorq %r14,%r13 + xorq %r8,%r15 + addq %r9,%r12 + + movq %r10,%r9 + addq %r13,%r12 + + addq %r15,%r12 + movq %r10,%r13 + movq %r10,%r14 + + rorq $28,%r9 + rorq $34,%r13 + movq %r10,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r9 + rorq $5,%r13 + orq %rax,%r14 + + xorq %r13,%r9 + andq %rax,%r15 + addq %r12,%rbx + + andq %r11,%r14 + addq %r12,%r9 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r9 + movq 96(%rsp),%r13 + movq 72(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 32(%rsp),%r12 + + addq 88(%rsp),%r12 + movq %rbx,%r13 + movq %rbx,%r14 + movq %rcx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rdx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rbx,%r15 + movq %r12,88(%rsp) + + xorq %r14,%r13 + xorq %rdx,%r15 + addq %r8,%r12 + + movq %r9,%r8 + addq %r13,%r12 + + addq %r15,%r12 + movq %r9,%r13 + movq %r9,%r14 + + rorq $28,%r8 + rorq $34,%r13 + movq %r9,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%r8 + rorq $5,%r13 + orq %r11,%r14 + + xorq %r13,%r8 + andq %r11,%r15 + addq %r12,%rax + + andq %r10,%r14 + addq %r12,%r8 + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%r8 + movq 104(%rsp),%r13 + movq 80(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 40(%rsp),%r12 + + addq 96(%rsp),%r12 + movq %rax,%r13 + movq %rax,%r14 + movq %rbx,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rcx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %rax,%r15 + movq %r12,96(%rsp) + + xorq %r14,%r13 + xorq %rcx,%r15 + addq %rdx,%r12 + + movq %r8,%rdx + addq %r13,%r12 + + addq %r15,%r12 + movq %r8,%r13 + movq %r8,%r14 + + rorq $28,%rdx + rorq $34,%r13 + movq %r8,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rdx + rorq $5,%r13 + orq %r10,%r14 + + xorq %r13,%rdx + andq %r10,%r15 + addq %r12,%r11 + + andq %r9,%r14 + addq %r12,%rdx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rdx + movq 112(%rsp),%r13 + movq 88(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 48(%rsp),%r12 + + addq 104(%rsp),%r12 + movq %r11,%r13 + movq %r11,%r14 + movq %rax,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rbx,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r11,%r15 + movq %r12,104(%rsp) + + xorq %r14,%r13 + xorq %rbx,%r15 + addq %rcx,%r12 + + movq %rdx,%rcx + addq %r13,%r12 + + addq %r15,%r12 + movq %rdx,%r13 + movq %rdx,%r14 + + rorq $28,%rcx + rorq $34,%r13 + movq %rdx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rcx + rorq $5,%r13 + orq %r9,%r14 + + xorq %r13,%rcx + andq %r9,%r15 + addq %r12,%r10 + + andq %r8,%r14 + addq %r12,%rcx + + orq %r15,%r14 + 
leaq 1(%rdi),%rdi + + addq %r14,%rcx + movq 120(%rsp),%r13 + movq 96(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 56(%rsp),%r12 + + addq 112(%rsp),%r12 + movq %r10,%r13 + movq %r10,%r14 + movq %r11,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %rax,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r10,%r15 + movq %r12,112(%rsp) + + xorq %r14,%r13 + xorq %rax,%r15 + addq %rbx,%r12 + + movq %rcx,%rbx + addq %r13,%r12 + + addq %r15,%r12 + movq %rcx,%r13 + movq %rcx,%r14 + + rorq $28,%rbx + rorq $34,%r13 + movq %rcx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rbx + rorq $5,%r13 + orq %r8,%r14 + + xorq %r13,%rbx + andq %r8,%r15 + addq %r12,%r9 + + andq %rdx,%r14 + addq %r12,%rbx + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rbx + movq 0(%rsp),%r13 + movq 104(%rsp),%r12 + + movq %r13,%r15 + + shrq $7,%r13 + rorq $1,%r15 + + xorq %r15,%r13 + rorq $7,%r15 + + xorq %r15,%r13 + movq %r12,%r14 + + shrq $6,%r12 + rorq $19,%r14 + + xorq %r14,%r12 + rorq $42,%r14 + + xorq %r14,%r12 + + addq %r13,%r12 + + addq 64(%rsp),%r12 + + addq 120(%rsp),%r12 + movq %r9,%r13 + movq %r9,%r14 + movq %r10,%r15 + + rorq $14,%r13 + rorq $18,%r14 + xorq %r11,%r15 + + xorq %r14,%r13 + rorq $23,%r14 + andq %r9,%r15 + movq %r12,120(%rsp) + + xorq %r14,%r13 + xorq %r11,%r15 + addq %rax,%r12 + + movq %rbx,%rax + addq %r13,%r12 + + addq %r15,%r12 + movq %rbx,%r13 + movq %rbx,%r14 + + rorq $28,%rax + rorq $34,%r13 + movq %rbx,%r15 + addq (%rbp,%rdi,8),%r12 + + xorq %r13,%rax + rorq $5,%r13 + orq %rdx,%r14 + + xorq %r13,%rax + andq %rdx,%r15 + addq %r12,%r8 + + andq %rcx,%r14 + addq %r12,%rax + + orq %r15,%r14 + leaq 1(%rdi),%rdi + + addq %r14,%rax + cmpq $80,%rdi + jb .Lrounds_16_xx + + movq 128+0(%rsp),%rdi + leaq 128(%rsi),%rsi + + addq 0(%rdi),%rax + addq 8(%rdi),%rbx + addq 16(%rdi),%rcx + addq 24(%rdi),%rdx + addq 32(%rdi),%r8 + addq 40(%rdi),%r9 + addq 48(%rdi),%r10 + addq 56(%rdi),%r11 + + cmpq 128+16(%rsp),%rsi + + movq %rax,0(%rdi) + movq %rbx,8(%rdi) + movq %rcx,16(%rdi) + movq %rdx,24(%rdi) + movq %r8,32(%rdi) + movq %r9,40(%rdi) + movq %r10,48(%rdi) + movq %r11,56(%rdi) + jb .Lloop + + movq 128+24(%rsp),%rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + + .byte 0xf3,0xc3 +.size sha512_block_data_order,.-sha512_block_data_order +.align 64 +.type K512,@object +K512: +.quad 0x428a2f98d728ae22,0x7137449123ef65cd +.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc +.quad 0x3956c25bf348b538,0x59f111f1b605d019 +.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 +.quad 0xd807aa98a3030242,0x12835b0145706fbe +.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 +.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 +.quad 0x9bdc06a725c71235,0xc19bf174cf692694 +.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 +.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 +.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 +.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 +.quad 0x983e5152ee66dfab,0xa831c66d2db43210 +.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 +.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 +.quad 0x06ca6351e003826f,0x142929670a0e6e70 +.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 +.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df +.quad 0x650a73548baf63de,0x766a0abb3c77b2a8 +.quad 0x81c2c92e47edaee6,0x92722c851482353b +.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 +.quad 0xc24b8b70d0f89791,0xc76c51a30654be30 +.quad 0xd192e819d6ef5218,0xd69906245565a910 
+.quad 0xf40e35855771202a,0x106aa07032bbd1b8 +.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 +.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 +.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb +.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 +.quad 0x748f82ee5defb2fc,0x78a5636f43172f60 +.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec +.quad 0x90befffa23631e28,0xa4506cebde82bde9 +.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b +.quad 0xca273eceea26619c,0xd186b8c721c0c207 +.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 +.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 +.quad 0x113f9804bef90dae,0x1b710b35131c471b +.quad 0x28db77f523047d84,0x32caab7b40c72493 +.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c +.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a +.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 Property changes on: secure/lib/libcrypto/amd64/sha512-x86_64.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/md5-x86_64.S =================================================================== --- secure/lib/libcrypto/amd64/md5-x86_64.S (revision 0) +++ secure/lib/libcrypto/amd64/md5-x86_64.S (revision 0) @@ -0,0 +1,631 @@ + # $FreeBSD$ +.text +.align 16 + +.globl md5_block_asm_data_order +.type md5_block_asm_data_order,@function +md5_block_asm_data_order: + pushq %rbp + pushq %rbx + pushq %r14 + pushq %r15 + + + + + movq %rdi,%rbp + shlq $6,%rdx + leaq (%rsi,%rdx,1),%rdi + movl 0(%rbp),%eax + movl 4(%rbp),%ebx + movl 8(%rbp),%ecx + movl 12(%rbp),%edx + + + + + + + + cmpq %rdi,%rsi + je .Lend + + +.Lloop: + movl %eax,%r8d + movl %ebx,%r9d + movl %ecx,%r14d + movl %edx,%r15d + movl 0(%rsi),%r10d + movl %edx,%r11d + xorl %ecx,%r11d + leal -680876936(%rax,%r10,1),%eax + andl %ebx,%r11d + xorl %edx,%r11d + movl 4(%rsi),%r10d + addl %r11d,%eax + roll $7,%eax + movl %ecx,%r11d + addl %ebx,%eax + xorl %ebx,%r11d + leal -389564586(%rdx,%r10,1),%edx + andl %eax,%r11d + xorl %ecx,%r11d + movl 8(%rsi),%r10d + addl %r11d,%edx + roll $12,%edx + movl %ebx,%r11d + addl %eax,%edx + xorl %eax,%r11d + leal 606105819(%rcx,%r10,1),%ecx + andl %edx,%r11d + xorl %ebx,%r11d + movl 12(%rsi),%r10d + addl %r11d,%ecx + roll $17,%ecx + movl %eax,%r11d + addl %edx,%ecx + xorl %edx,%r11d + leal -1044525330(%rbx,%r10,1),%ebx + andl %ecx,%r11d + xorl %eax,%r11d + movl 16(%rsi),%r10d + addl %r11d,%ebx + roll $22,%ebx + movl %edx,%r11d + addl %ecx,%ebx + xorl %ecx,%r11d + leal -176418897(%rax,%r10,1),%eax + andl %ebx,%r11d + xorl %edx,%r11d + movl 20(%rsi),%r10d + addl %r11d,%eax + roll $7,%eax + movl %ecx,%r11d + addl %ebx,%eax + xorl %ebx,%r11d + leal 1200080426(%rdx,%r10,1),%edx + andl %eax,%r11d + xorl %ecx,%r11d + movl 24(%rsi),%r10d + addl %r11d,%edx + roll $12,%edx + movl %ebx,%r11d + addl %eax,%edx + xorl %eax,%r11d + leal -1473231341(%rcx,%r10,1),%ecx + andl %edx,%r11d + xorl %ebx,%r11d + movl 28(%rsi),%r10d + addl %r11d,%ecx + roll $17,%ecx + movl %eax,%r11d + addl %edx,%ecx + xorl %edx,%r11d + leal -45705983(%rbx,%r10,1),%ebx + andl %ecx,%r11d + xorl %eax,%r11d + movl 32(%rsi),%r10d + addl %r11d,%ebx + roll $22,%ebx + movl %edx,%r11d + addl %ecx,%ebx + xorl %ecx,%r11d + leal 1770035416(%rax,%r10,1),%eax + andl %ebx,%r11d + xorl %edx,%r11d + movl 36(%rsi),%r10d + addl %r11d,%eax + roll $7,%eax + movl %ecx,%r11d + addl %ebx,%eax + xorl %ebx,%r11d + leal -1958414417(%rdx,%r10,1),%edx + andl %eax,%r11d + xorl %ecx,%r11d + movl 40(%rsi),%r10d + addl %r11d,%edx + roll $12,%edx + movl %ebx,%r11d + addl %eax,%edx + xorl 
%eax,%r11d + leal -42063(%rcx,%r10,1),%ecx + andl %edx,%r11d + xorl %ebx,%r11d + movl 44(%rsi),%r10d + addl %r11d,%ecx + roll $17,%ecx + movl %eax,%r11d + addl %edx,%ecx + xorl %edx,%r11d + leal -1990404162(%rbx,%r10,1),%ebx + andl %ecx,%r11d + xorl %eax,%r11d + movl 48(%rsi),%r10d + addl %r11d,%ebx + roll $22,%ebx + movl %edx,%r11d + addl %ecx,%ebx + xorl %ecx,%r11d + leal 1804603682(%rax,%r10,1),%eax + andl %ebx,%r11d + xorl %edx,%r11d + movl 52(%rsi),%r10d + addl %r11d,%eax + roll $7,%eax + movl %ecx,%r11d + addl %ebx,%eax + xorl %ebx,%r11d + leal -40341101(%rdx,%r10,1),%edx + andl %eax,%r11d + xorl %ecx,%r11d + movl 56(%rsi),%r10d + addl %r11d,%edx + roll $12,%edx + movl %ebx,%r11d + addl %eax,%edx + xorl %eax,%r11d + leal -1502002290(%rcx,%r10,1),%ecx + andl %edx,%r11d + xorl %ebx,%r11d + movl 60(%rsi),%r10d + addl %r11d,%ecx + roll $17,%ecx + movl %eax,%r11d + addl %edx,%ecx + xorl %edx,%r11d + leal 1236535329(%rbx,%r10,1),%ebx + andl %ecx,%r11d + xorl %eax,%r11d + movl 0(%rsi),%r10d + addl %r11d,%ebx + roll $22,%ebx + movl %edx,%r11d + addl %ecx,%ebx + movl 4(%rsi),%r10d + movl %ecx,%r11d + xorl %ebx,%r11d + leal -165796510(%rax,%r10,1),%eax + andl %edx,%r11d + xorl %ecx,%r11d + movl 24(%rsi),%r10d + addl %r11d,%eax + roll $5,%eax + movl %ebx,%r11d + addl %ebx,%eax + xorl %eax,%r11d + leal -1069501632(%rdx,%r10,1),%edx + andl %ecx,%r11d + xorl %ebx,%r11d + movl 44(%rsi),%r10d + addl %r11d,%edx + roll $9,%edx + movl %eax,%r11d + addl %eax,%edx + xorl %edx,%r11d + leal 643717713(%rcx,%r10,1),%ecx + andl %ebx,%r11d + xorl %eax,%r11d + movl 0(%rsi),%r10d + addl %r11d,%ecx + roll $14,%ecx + movl %edx,%r11d + addl %edx,%ecx + xorl %ecx,%r11d + leal -373897302(%rbx,%r10,1),%ebx + andl %eax,%r11d + xorl %edx,%r11d + movl 20(%rsi),%r10d + addl %r11d,%ebx + roll $20,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + xorl %ebx,%r11d + leal -701558691(%rax,%r10,1),%eax + andl %edx,%r11d + xorl %ecx,%r11d + movl 40(%rsi),%r10d + addl %r11d,%eax + roll $5,%eax + movl %ebx,%r11d + addl %ebx,%eax + xorl %eax,%r11d + leal 38016083(%rdx,%r10,1),%edx + andl %ecx,%r11d + xorl %ebx,%r11d + movl 60(%rsi),%r10d + addl %r11d,%edx + roll $9,%edx + movl %eax,%r11d + addl %eax,%edx + xorl %edx,%r11d + leal -660478335(%rcx,%r10,1),%ecx + andl %ebx,%r11d + xorl %eax,%r11d + movl 16(%rsi),%r10d + addl %r11d,%ecx + roll $14,%ecx + movl %edx,%r11d + addl %edx,%ecx + xorl %ecx,%r11d + leal -405537848(%rbx,%r10,1),%ebx + andl %eax,%r11d + xorl %edx,%r11d + movl 36(%rsi),%r10d + addl %r11d,%ebx + roll $20,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + xorl %ebx,%r11d + leal 568446438(%rax,%r10,1),%eax + andl %edx,%r11d + xorl %ecx,%r11d + movl 56(%rsi),%r10d + addl %r11d,%eax + roll $5,%eax + movl %ebx,%r11d + addl %ebx,%eax + xorl %eax,%r11d + leal -1019803690(%rdx,%r10,1),%edx + andl %ecx,%r11d + xorl %ebx,%r11d + movl 12(%rsi),%r10d + addl %r11d,%edx + roll $9,%edx + movl %eax,%r11d + addl %eax,%edx + xorl %edx,%r11d + leal -187363961(%rcx,%r10,1),%ecx + andl %ebx,%r11d + xorl %eax,%r11d + movl 32(%rsi),%r10d + addl %r11d,%ecx + roll $14,%ecx + movl %edx,%r11d + addl %edx,%ecx + xorl %ecx,%r11d + leal 1163531501(%rbx,%r10,1),%ebx + andl %eax,%r11d + xorl %edx,%r11d + movl 52(%rsi),%r10d + addl %r11d,%ebx + roll $20,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + xorl %ebx,%r11d + leal -1444681467(%rax,%r10,1),%eax + andl %edx,%r11d + xorl %ecx,%r11d + movl 8(%rsi),%r10d + addl %r11d,%eax + roll $5,%eax + movl %ebx,%r11d + addl %ebx,%eax + xorl %eax,%r11d + leal -51403784(%rdx,%r10,1),%edx + andl %ecx,%r11d + xorl %ebx,%r11d + movl 
28(%rsi),%r10d + addl %r11d,%edx + roll $9,%edx + movl %eax,%r11d + addl %eax,%edx + xorl %edx,%r11d + leal 1735328473(%rcx,%r10,1),%ecx + andl %ebx,%r11d + xorl %eax,%r11d + movl 48(%rsi),%r10d + addl %r11d,%ecx + roll $14,%ecx + movl %edx,%r11d + addl %edx,%ecx + xorl %ecx,%r11d + leal -1926607734(%rbx,%r10,1),%ebx + andl %eax,%r11d + xorl %edx,%r11d + movl 0(%rsi),%r10d + addl %r11d,%ebx + roll $20,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + movl 20(%rsi),%r10d + movl %ecx,%r11d + leal -378558(%rax,%r10,1),%eax + movl 32(%rsi),%r10d + xorl %edx,%r11d + xorl %ebx,%r11d + addl %r11d,%eax + roll $4,%eax + movl %ebx,%r11d + addl %ebx,%eax + leal -2022574463(%rdx,%r10,1),%edx + movl 44(%rsi),%r10d + xorl %ecx,%r11d + xorl %eax,%r11d + addl %r11d,%edx + roll $11,%edx + movl %eax,%r11d + addl %eax,%edx + leal 1839030562(%rcx,%r10,1),%ecx + movl 56(%rsi),%r10d + xorl %ebx,%r11d + xorl %edx,%r11d + addl %r11d,%ecx + roll $16,%ecx + movl %edx,%r11d + addl %edx,%ecx + leal -35309556(%rbx,%r10,1),%ebx + movl 4(%rsi),%r10d + xorl %eax,%r11d + xorl %ecx,%r11d + addl %r11d,%ebx + roll $23,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + leal -1530992060(%rax,%r10,1),%eax + movl 16(%rsi),%r10d + xorl %edx,%r11d + xorl %ebx,%r11d + addl %r11d,%eax + roll $4,%eax + movl %ebx,%r11d + addl %ebx,%eax + leal 1272893353(%rdx,%r10,1),%edx + movl 28(%rsi),%r10d + xorl %ecx,%r11d + xorl %eax,%r11d + addl %r11d,%edx + roll $11,%edx + movl %eax,%r11d + addl %eax,%edx + leal -155497632(%rcx,%r10,1),%ecx + movl 40(%rsi),%r10d + xorl %ebx,%r11d + xorl %edx,%r11d + addl %r11d,%ecx + roll $16,%ecx + movl %edx,%r11d + addl %edx,%ecx + leal -1094730640(%rbx,%r10,1),%ebx + movl 52(%rsi),%r10d + xorl %eax,%r11d + xorl %ecx,%r11d + addl %r11d,%ebx + roll $23,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + leal 681279174(%rax,%r10,1),%eax + movl 0(%rsi),%r10d + xorl %edx,%r11d + xorl %ebx,%r11d + addl %r11d,%eax + roll $4,%eax + movl %ebx,%r11d + addl %ebx,%eax + leal -358537222(%rdx,%r10,1),%edx + movl 12(%rsi),%r10d + xorl %ecx,%r11d + xorl %eax,%r11d + addl %r11d,%edx + roll $11,%edx + movl %eax,%r11d + addl %eax,%edx + leal -722521979(%rcx,%r10,1),%ecx + movl 24(%rsi),%r10d + xorl %ebx,%r11d + xorl %edx,%r11d + addl %r11d,%ecx + roll $16,%ecx + movl %edx,%r11d + addl %edx,%ecx + leal 76029189(%rbx,%r10,1),%ebx + movl 36(%rsi),%r10d + xorl %eax,%r11d + xorl %ecx,%r11d + addl %r11d,%ebx + roll $23,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + leal -640364487(%rax,%r10,1),%eax + movl 48(%rsi),%r10d + xorl %edx,%r11d + xorl %ebx,%r11d + addl %r11d,%eax + roll $4,%eax + movl %ebx,%r11d + addl %ebx,%eax + leal -421815835(%rdx,%r10,1),%edx + movl 60(%rsi),%r10d + xorl %ecx,%r11d + xorl %eax,%r11d + addl %r11d,%edx + roll $11,%edx + movl %eax,%r11d + addl %eax,%edx + leal 530742520(%rcx,%r10,1),%ecx + movl 8(%rsi),%r10d + xorl %ebx,%r11d + xorl %edx,%r11d + addl %r11d,%ecx + roll $16,%ecx + movl %edx,%r11d + addl %edx,%ecx + leal -995338651(%rbx,%r10,1),%ebx + movl 0(%rsi),%r10d + xorl %eax,%r11d + xorl %ecx,%r11d + addl %r11d,%ebx + roll $23,%ebx + movl %ecx,%r11d + addl %ecx,%ebx + movl 0(%rsi),%r10d + movl $4294967295,%r11d + xorl %edx,%r11d + leal -198630844(%rax,%r10,1),%eax + orl %ebx,%r11d + xorl %ecx,%r11d + addl %r11d,%eax + movl 28(%rsi),%r10d + movl $4294967295,%r11d + roll $6,%eax + xorl %ecx,%r11d + addl %ebx,%eax + leal 1126891415(%rdx,%r10,1),%edx + orl %eax,%r11d + xorl %ebx,%r11d + addl %r11d,%edx + movl 56(%rsi),%r10d + movl $4294967295,%r11d + roll $10,%edx + xorl %ebx,%r11d + addl %eax,%edx + leal -1416354905(%rcx,%r10,1),%ecx 
+ orl %edx,%r11d + xorl %eax,%r11d + addl %r11d,%ecx + movl 20(%rsi),%r10d + movl $4294967295,%r11d + roll $15,%ecx + xorl %eax,%r11d + addl %edx,%ecx + leal -57434055(%rbx,%r10,1),%ebx + orl %ecx,%r11d + xorl %edx,%r11d + addl %r11d,%ebx + movl 48(%rsi),%r10d + movl $4294967295,%r11d + roll $21,%ebx + xorl %edx,%r11d + addl %ecx,%ebx + leal 1700485571(%rax,%r10,1),%eax + orl %ebx,%r11d + xorl %ecx,%r11d + addl %r11d,%eax + movl 12(%rsi),%r10d + movl $4294967295,%r11d + roll $6,%eax + xorl %ecx,%r11d + addl %ebx,%eax + leal -1894986606(%rdx,%r10,1),%edx + orl %eax,%r11d + xorl %ebx,%r11d + addl %r11d,%edx + movl 40(%rsi),%r10d + movl $4294967295,%r11d + roll $10,%edx + xorl %ebx,%r11d + addl %eax,%edx + leal -1051523(%rcx,%r10,1),%ecx + orl %edx,%r11d + xorl %eax,%r11d + addl %r11d,%ecx + movl 4(%rsi),%r10d + movl $4294967295,%r11d + roll $15,%ecx + xorl %eax,%r11d + addl %edx,%ecx + leal -2054922799(%rbx,%r10,1),%ebx + orl %ecx,%r11d + xorl %edx,%r11d + addl %r11d,%ebx + movl 32(%rsi),%r10d + movl $4294967295,%r11d + roll $21,%ebx + xorl %edx,%r11d + addl %ecx,%ebx + leal 1873313359(%rax,%r10,1),%eax + orl %ebx,%r11d + xorl %ecx,%r11d + addl %r11d,%eax + movl 60(%rsi),%r10d + movl $4294967295,%r11d + roll $6,%eax + xorl %ecx,%r11d + addl %ebx,%eax + leal -30611744(%rdx,%r10,1),%edx + orl %eax,%r11d + xorl %ebx,%r11d + addl %r11d,%edx + movl 24(%rsi),%r10d + movl $4294967295,%r11d + roll $10,%edx + xorl %ebx,%r11d + addl %eax,%edx + leal -1560198380(%rcx,%r10,1),%ecx + orl %edx,%r11d + xorl %eax,%r11d + addl %r11d,%ecx + movl 52(%rsi),%r10d + movl $4294967295,%r11d + roll $15,%ecx + xorl %eax,%r11d + addl %edx,%ecx + leal 1309151649(%rbx,%r10,1),%ebx + orl %ecx,%r11d + xorl %edx,%r11d + addl %r11d,%ebx + movl 16(%rsi),%r10d + movl $4294967295,%r11d + roll $21,%ebx + xorl %edx,%r11d + addl %ecx,%ebx + leal -145523070(%rax,%r10,1),%eax + orl %ebx,%r11d + xorl %ecx,%r11d + addl %r11d,%eax + movl 44(%rsi),%r10d + movl $4294967295,%r11d + roll $6,%eax + xorl %ecx,%r11d + addl %ebx,%eax + leal -1120210379(%rdx,%r10,1),%edx + orl %eax,%r11d + xorl %ebx,%r11d + addl %r11d,%edx + movl 8(%rsi),%r10d + movl $4294967295,%r11d + roll $10,%edx + xorl %ebx,%r11d + addl %eax,%edx + leal 718787259(%rcx,%r10,1),%ecx + orl %edx,%r11d + xorl %eax,%r11d + addl %r11d,%ecx + movl 36(%rsi),%r10d + movl $4294967295,%r11d + roll $15,%ecx + xorl %eax,%r11d + addl %edx,%ecx + leal -343485551(%rbx,%r10,1),%ebx + orl %ecx,%r11d + xorl %edx,%r11d + addl %r11d,%ebx + movl 0(%rsi),%r10d + movl $4294967295,%r11d + roll $21,%ebx + xorl %edx,%r11d + addl %ecx,%ebx + + addl %r8d,%eax + addl %r9d,%ebx + addl %r14d,%ecx + addl %r15d,%edx + + + addq $64,%rsi + cmpq %rdi,%rsi + jb .Lloop + + +.Lend: + movl %eax,0(%rbp) + movl %ebx,4(%rbp) + movl %ecx,8(%rbp) + movl %edx,12(%rbp) + + popq %r15 + popq %r14 + popq %rbx + popq %rbp + .byte 0xf3,0xc3 +.size md5_block_asm_data_order,.-md5_block_asm_data_order Property changes on: secure/lib/libcrypto/amd64/md5-x86_64.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/x86_64cpuid.S =================================================================== --- secure/lib/libcrypto/amd64/x86_64cpuid.S (revision 0) +++ secure/lib/libcrypto/amd64/x86_64cpuid.S (revision 0) @@ -0,0 +1,109 @@ + # $FreeBSD$ +.text + +.globl OPENSSL_atomic_add +.type OPENSSL_atomic_add,@function +.align 16 +OPENSSL_atomic_add: + movl (%rdi),%eax +.Lspin: leaq 
(%rsi,%rax),%r8 +lock; cmpxchgl %r8d,(%rdi) + jne .Lspin + movl %r8d,%eax + .byte 0x48,0x98 + ret +.size OPENSSL_atomic_add,.-OPENSSL_atomic_add + +.globl OPENSSL_wipe_cpu +.type OPENSSL_wipe_cpu,@function +.align 16 +OPENSSL_wipe_cpu: + pxor %xmm0,%xmm0 + pxor %xmm1,%xmm1 + pxor %xmm2,%xmm2 + pxor %xmm3,%xmm3 + pxor %xmm4,%xmm4 + pxor %xmm5,%xmm5 + pxor %xmm6,%xmm6 + pxor %xmm7,%xmm7 + pxor %xmm8,%xmm8 + pxor %xmm9,%xmm9 + pxor %xmm10,%xmm10 + pxor %xmm11,%xmm11 + pxor %xmm12,%xmm12 + pxor %xmm13,%xmm13 + pxor %xmm14,%xmm14 + pxor %xmm15,%xmm15 + xorq %rcx,%rcx + xorq %rdx,%rdx + xorq %rsi,%rsi + xorq %rdi,%rdi + xorq %r8,%r8 + xorq %r9,%r9 + xorq %r10,%r10 + xorq %r11,%r11 + leaq 8(%rsp),%rax + ret +.size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu + +.extern OPENSSL_cpuid_setup +.hidden OPENSSL_cpuid_setup +.section .init + call OPENSSL_cpuid_setup + +.hidden OPENSSL_ia32cap_P +.comm OPENSSL_ia32cap_P,8 +.text + +.globl OPENSSL_rdtsc +.type OPENSSL_rdtsc,@function +.align 16 +OPENSSL_rdtsc: + rdtsc + shlq $32,%rdx + orq %rdx,%rax + .byte 0xf3,0xc3 +.size OPENSSL_rdtsc,.-OPENSSL_rdtsc + +.globl OPENSSL_ia32_cpuid +.type OPENSSL_ia32_cpuid,@function +.align 16 +OPENSSL_ia32_cpuid: + movq %rbx,%r8 + + xorl %eax,%eax + cpuid + xorl %eax,%eax + cmpl $1970169159,%ebx + setne %al + movl %eax,%r9d + cmpl $1231384169,%edx + setne %al + orl %eax,%r9d + cmpl $1818588270,%ecx + setne %al + orl %eax,%r9d + + movl $1,%eax + cpuid + cmpl $0,%r9d + jne .Lnotintel + orl $1048576,%edx + andb $15,%ah + cmpb $15,%ah + je .Lnotintel + orl $1073741824,%edx +.Lnotintel: + btl $28,%edx + jnc .Ldone + shrl $16,%ebx + cmpb $1,%bl + ja .Ldone + andl $4026531839,%edx +.Ldone: + shlq $32,%rcx + movl %edx,%eax + movq %r8,%rbx + orq %rcx,%rax + .byte 0xf3,0xc3 +.size OPENSSL_ia32_cpuid,.-OPENSSL_ia32_cpuid Property changes on: secure/lib/libcrypto/amd64/x86_64cpuid.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/aes-x86_64.S =================================================================== --- secure/lib/libcrypto/amd64/aes-x86_64.S (revision 0) +++ secure/lib/libcrypto/amd64/aes-x86_64.S (revision 0) @@ -0,0 +1,1682 @@ + # $FreeBSD$ +.text +.type _x86_64_AES_encrypt,@function +.align 16 +_x86_64_AES_encrypt: + xorl 0(%r15),%eax + xorl 4(%r15),%ebx + xorl 8(%r15),%ecx + xorl 12(%r15),%edx + + movl 240(%r15),%r13d + subl $1,%r13d + jmp .Lenc_loop +.align 16 +.Lenc_loop: + + movzbl %al,%esi + movzbl %bl,%edi + movzbl %cl,%ebp + movl 0(%r14,%rsi,8),%r10d + movl 0(%r14,%rdi,8),%r11d + movl 0(%r14,%rbp,8),%r12d + + movzbl %bh,%esi + movzbl %ch,%edi + movzbl %dl,%ebp + xorl 3(%r14,%rsi,8),%r10d + xorl 3(%r14,%rdi,8),%r11d + movl 0(%r14,%rbp,8),%r8d + + movzbl %dh,%esi + shrl $16,%ecx + movzbl %ah,%ebp + xorl 3(%r14,%rsi,8),%r12d + shrl $16,%edx + xorl 3(%r14,%rbp,8),%r8d + + shrl $16,%ebx + leaq 16(%r15),%r15 + shrl $16,%eax + + movzbl %cl,%esi + movzbl %dl,%edi + movzbl %al,%ebp + xorl 2(%r14,%rsi,8),%r10d + xorl 2(%r14,%rdi,8),%r11d + xorl 2(%r14,%rbp,8),%r12d + + movzbl %dh,%esi + movzbl %ah,%edi + movzbl %bl,%ebp + xorl 1(%r14,%rsi,8),%r10d + xorl 1(%r14,%rdi,8),%r11d + xorl 2(%r14,%rbp,8),%r8d + + movl 12(%r15),%edx + movzbl %bh,%edi + movzbl %ch,%ebp + movl 0(%r15),%eax + xorl 1(%r14,%rdi,8),%r12d + xorl 1(%r14,%rbp,8),%r8d + + movl 4(%r15),%ebx + movl 8(%r15),%ecx + xorl %r10d,%eax + xorl %r11d,%ebx + xorl %r12d,%ecx + xorl %r8d,%edx + subl $1,%r13d + jnz .Lenc_loop + 
movzbl %al,%esi + movzbl %bl,%edi + movzbl %cl,%ebp + movl 2(%r14,%rsi,8),%r10d + movl 2(%r14,%rdi,8),%r11d + movl 2(%r14,%rbp,8),%r12d + + andl $255,%r10d + andl $255,%r11d + andl $255,%r12d + + movzbl %dl,%esi + movzbl %bh,%edi + movzbl %ch,%ebp + movl 2(%r14,%rsi,8),%r8d + movl 0(%r14,%rdi,8),%edi + movl 0(%r14,%rbp,8),%ebp + + andl $255,%r8d + andl $65280,%edi + andl $65280,%ebp + + xorl %edi,%r10d + xorl %ebp,%r11d + shrl $16,%ecx + + movzbl %dh,%esi + movzbl %ah,%edi + shrl $16,%edx + movl 0(%r14,%rsi,8),%esi + movl 0(%r14,%rdi,8),%edi + + andl $65280,%esi + andl $65280,%edi + shrl $16,%ebx + xorl %esi,%r12d + xorl %edi,%r8d + shrl $16,%eax + + movzbl %cl,%esi + movzbl %dl,%edi + movzbl %al,%ebp + movl 0(%r14,%rsi,8),%esi + movl 0(%r14,%rdi,8),%edi + movl 0(%r14,%rbp,8),%ebp + + andl $16711680,%esi + andl $16711680,%edi + andl $16711680,%ebp + + xorl %esi,%r10d + xorl %edi,%r11d + xorl %ebp,%r12d + + movzbl %bl,%esi + movzbl %dh,%edi + movzbl %ah,%ebp + movl 0(%r14,%rsi,8),%esi + movl 2(%r14,%rdi,8),%edi + movl 2(%r14,%rbp,8),%ebp + + andl $16711680,%esi + andl $4278190080,%edi + andl $4278190080,%ebp + + xorl %esi,%r8d + xorl %edi,%r10d + xorl %ebp,%r11d + + movzbl %bh,%esi + movzbl %ch,%edi + movl 16+12(%r15),%edx + movl 2(%r14,%rsi,8),%esi + movl 2(%r14,%rdi,8),%edi + movl 16+0(%r15),%eax + + andl $4278190080,%esi + andl $4278190080,%edi + + xorl %esi,%r12d + xorl %edi,%r8d + + movl 16+4(%r15),%ebx + movl 16+8(%r15),%ecx + xorl %r10d,%eax + xorl %r11d,%ebx + xorl %r12d,%ecx + xorl %r8d,%edx +.byte 0xf3,0xc3 +.size _x86_64_AES_encrypt,.-_x86_64_AES_encrypt +.globl AES_encrypt +.type AES_encrypt,@function +.align 16 +AES_encrypt: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + + movq %rdx,%r15 + movq %rdi,%r8 + movq %rsi,%r9 + + .long 0x1358d4c,0x90000000 + leaq AES_Te-.(%r14),%r14 + + movl 0(%r8),%eax + movl 4(%r8),%ebx + movl 8(%r8),%ecx + movl 12(%r8),%edx + + call _x86_64_AES_encrypt + + movl %eax,0(%r9) + movl %ebx,4(%r9) + movl %ecx,8(%r9) + movl %edx,12(%r9) + + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + .byte 0xf3,0xc3 +.size AES_encrypt,.-AES_encrypt +.type _x86_64_AES_decrypt,@function +.align 16 +_x86_64_AES_decrypt: + xorl 0(%r15),%eax + xorl 4(%r15),%ebx + xorl 8(%r15),%ecx + xorl 12(%r15),%edx + + movl 240(%r15),%r13d + subl $1,%r13d + jmp .Ldec_loop +.align 16 +.Ldec_loop: + + movzbl %al,%esi + movzbl %bl,%edi + movzbl %cl,%ebp + movl 0(%r14,%rsi,8),%r10d + movl 0(%r14,%rdi,8),%r11d + movl 0(%r14,%rbp,8),%r12d + + movzbl %dh,%esi + movzbl %ah,%edi + movzbl %dl,%ebp + xorl 3(%r14,%rsi,8),%r10d + xorl 3(%r14,%rdi,8),%r11d + movl 0(%r14,%rbp,8),%r8d + + movzbl %bh,%esi + shrl $16,%eax + movzbl %ch,%ebp + xorl 3(%r14,%rsi,8),%r12d + shrl $16,%edx + xorl 3(%r14,%rbp,8),%r8d + + shrl $16,%ebx + leaq 16(%r15),%r15 + shrl $16,%ecx + + movzbl %cl,%esi + movzbl %dl,%edi + movzbl %al,%ebp + xorl 2(%r14,%rsi,8),%r10d + xorl 2(%r14,%rdi,8),%r11d + xorl 2(%r14,%rbp,8),%r12d + + movzbl %bh,%esi + movzbl %ch,%edi + movzbl %bl,%ebp + xorl 1(%r14,%rsi,8),%r10d + xorl 1(%r14,%rdi,8),%r11d + xorl 2(%r14,%rbp,8),%r8d + + movzbl %dh,%esi + movl 12(%r15),%edx + movzbl %ah,%ebp + xorl 1(%r14,%rsi,8),%r12d + movl 0(%r15),%eax + xorl 1(%r14,%rbp,8),%r8d + + xorl %r10d,%eax + movl 4(%r15),%ebx + movl 8(%r15),%ecx + xorl %r12d,%ecx + xorl %r11d,%ebx + xorl %r8d,%edx + subl $1,%r13d + jnz .Ldec_loop + movzbl %al,%esi + movzbl %bl,%edi + movzbl %cl,%ebp + movzbl 2048(%r14,%rsi,1),%r10d + movzbl 2048(%r14,%rdi,1),%r11d + movzbl 
2048(%r14,%rbp,1),%r12d + + movzbl %dl,%esi + movzbl %dh,%edi + movzbl %ah,%ebp + movzbl 2048(%r14,%rsi,1),%r8d + movzbl 2048(%r14,%rdi,1),%edi + movzbl 2048(%r14,%rbp,1),%ebp + + shll $8,%edi + shll $8,%ebp + + xorl %edi,%r10d + xorl %ebp,%r11d + shrl $16,%edx + + movzbl %bh,%esi + movzbl %ch,%edi + shrl $16,%eax + movzbl 2048(%r14,%rsi,1),%esi + movzbl 2048(%r14,%rdi,1),%edi + + shll $8,%esi + shll $8,%edi + shrl $16,%ebx + xorl %esi,%r12d + xorl %edi,%r8d + shrl $16,%ecx + + movzbl %cl,%esi + movzbl %dl,%edi + movzbl %al,%ebp + movzbl 2048(%r14,%rsi,1),%esi + movzbl 2048(%r14,%rdi,1),%edi + movzbl 2048(%r14,%rbp,1),%ebp + + shll $16,%esi + shll $16,%edi + shll $16,%ebp + + xorl %esi,%r10d + xorl %edi,%r11d + xorl %ebp,%r12d + + movzbl %bl,%esi + movzbl %bh,%edi + movzbl %ch,%ebp + movzbl 2048(%r14,%rsi,1),%esi + movzbl 2048(%r14,%rdi,1),%edi + movzbl 2048(%r14,%rbp,1),%ebp + + shll $16,%esi + shll $24,%edi + shll $24,%ebp + + xorl %esi,%r8d + xorl %edi,%r10d + xorl %ebp,%r11d + + movzbl %dh,%esi + movzbl %ah,%edi + movl 16+12(%r15),%edx + movzbl 2048(%r14,%rsi,1),%esi + movzbl 2048(%r14,%rdi,1),%edi + movl 16+0(%r15),%eax + + shll $24,%esi + shll $24,%edi + + xorl %esi,%r12d + xorl %edi,%r8d + + movl 16+4(%r15),%ebx + movl 16+8(%r15),%ecx + xorl %r10d,%eax + xorl %r11d,%ebx + xorl %r12d,%ecx + xorl %r8d,%edx +.byte 0xf3,0xc3 +.size _x86_64_AES_decrypt,.-_x86_64_AES_decrypt +.globl AES_decrypt +.type AES_decrypt,@function +.align 16 +AES_decrypt: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + + movq %rdx,%r15 + movq %rdi,%r8 + movq %rsi,%r9 + + .long 0x1358d4c,0x90000000 + leaq AES_Td-.(%r14),%r14 + + + leaq 2048+128(%r14),%r14; + movl 0-128(%r14),%eax + movl 32-128(%r14),%ebx + movl 64-128(%r14),%ecx + movl 96-128(%r14),%edx + movl 128-128(%r14),%eax + movl 160-128(%r14),%ebx + movl 192-128(%r14),%ecx + movl 224-128(%r14),%edx + leaq -2048-128(%r14),%r14; + + movl 0(%r8),%eax + movl 4(%r8),%ebx + movl 8(%r8),%ecx + movl 12(%r8),%edx + + call _x86_64_AES_decrypt + + movl %eax,0(%r9) + movl %ebx,4(%r9) + movl %ecx,8(%r9) + movl %edx,12(%r9) + + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + .byte 0xf3,0xc3 +.size AES_decrypt,.-AES_decrypt +.globl AES_set_encrypt_key +.type AES_set_encrypt_key,@function +.align 16 +AES_set_encrypt_key: + pushq %rbx + pushq %rbp + subq $8,%rsp + + call _x86_64_AES_set_encrypt_key + + movq 8(%rsp),%rbp + movq 16(%rsp),%rbx + addq $24,%rsp + .byte 0xf3,0xc3 +.size AES_set_encrypt_key,.-AES_set_encrypt_key + +.type _x86_64_AES_set_encrypt_key,@function +.align 16 +_x86_64_AES_set_encrypt_key: + movl %esi,%ecx + movq %rdi,%rsi + movq %rdx,%rdi + + testq $-1,%rsi + jz .Lbadpointer + testq $-1,%rdi + jz .Lbadpointer + + .long 0x12d8d48,0x90000000 + leaq AES_Te-.(%rbp),%rbp + + cmpl $128,%ecx + je .L10rounds + cmpl $192,%ecx + je .L12rounds + cmpl $256,%ecx + je .L14rounds + movq $-2,%rax + jmp .Lexit + +.L10rounds: + movl 0(%rsi),%eax + movl 4(%rsi),%ebx + movl 8(%rsi),%ecx + movl 12(%rsi),%edx + movl %eax,0(%rdi) + movl %ebx,4(%rdi) + movl %ecx,8(%rdi) + movl %edx,12(%rdi) + + xorl %ecx,%ecx + jmp .L10shortcut +.align 4 +.L10loop: + movl 0(%rdi),%eax + movl 12(%rdi),%edx +.L10shortcut: + movzbl %dl,%esi + movl 2(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $4278190080,%ebx + xorl %ebx,%eax + + movl 2(%rbp,%rsi,8),%ebx + shrl $16,%edx + andl $255,%ebx + movzbl %dl,%esi + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $65280,%ebx + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + andl 
$16711680,%ebx + xorl %ebx,%eax + + xorl 2048(%rbp,%rcx,4),%eax + movl %eax,16(%rdi) + xorl 4(%rdi),%eax + movl %eax,20(%rdi) + xorl 8(%rdi),%eax + movl %eax,24(%rdi) + xorl 12(%rdi),%eax + movl %eax,28(%rdi) + addl $1,%ecx + leaq 16(%rdi),%rdi + cmpl $10,%ecx + jl .L10loop + + movl $10,80(%rdi) + xorq %rax,%rax + jmp .Lexit + +.L12rounds: + movl 0(%rsi),%eax + movl 4(%rsi),%ebx + movl 8(%rsi),%ecx + movl 12(%rsi),%edx + movl %eax,0(%rdi) + movl %ebx,4(%rdi) + movl %ecx,8(%rdi) + movl %edx,12(%rdi) + movl 16(%rsi),%ecx + movl 20(%rsi),%edx + movl %ecx,16(%rdi) + movl %edx,20(%rdi) + + xorl %ecx,%ecx + jmp .L12shortcut +.align 4 +.L12loop: + movl 0(%rdi),%eax + movl 20(%rdi),%edx +.L12shortcut: + movzbl %dl,%esi + movl 2(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $4278190080,%ebx + xorl %ebx,%eax + + movl 2(%rbp,%rsi,8),%ebx + shrl $16,%edx + andl $255,%ebx + movzbl %dl,%esi + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $65280,%ebx + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + andl $16711680,%ebx + xorl %ebx,%eax + + xorl 2048(%rbp,%rcx,4),%eax + movl %eax,24(%rdi) + xorl 4(%rdi),%eax + movl %eax,28(%rdi) + xorl 8(%rdi),%eax + movl %eax,32(%rdi) + xorl 12(%rdi),%eax + movl %eax,36(%rdi) + + cmpl $7,%ecx + je .L12break + addl $1,%ecx + + xorl 16(%rdi),%eax + movl %eax,40(%rdi) + xorl 20(%rdi),%eax + movl %eax,44(%rdi) + + leaq 24(%rdi),%rdi + jmp .L12loop +.L12break: + movl $12,72(%rdi) + xorq %rax,%rax + jmp .Lexit + +.L14rounds: + movl 0(%rsi),%eax + movl 4(%rsi),%ebx + movl 8(%rsi),%ecx + movl 12(%rsi),%edx + movl %eax,0(%rdi) + movl %ebx,4(%rdi) + movl %ecx,8(%rdi) + movl %edx,12(%rdi) + movl 16(%rsi),%eax + movl 20(%rsi),%ebx + movl 24(%rsi),%ecx + movl 28(%rsi),%edx + movl %eax,16(%rdi) + movl %ebx,20(%rdi) + movl %ecx,24(%rdi) + movl %edx,28(%rdi) + + xorl %ecx,%ecx + jmp .L14shortcut +.align 4 +.L14loop: + movl 28(%rdi),%edx +.L14shortcut: + movl 0(%rdi),%eax + movzbl %dl,%esi + movl 2(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $4278190080,%ebx + xorl %ebx,%eax + + movl 2(%rbp,%rsi,8),%ebx + shrl $16,%edx + andl $255,%ebx + movzbl %dl,%esi + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $65280,%ebx + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + andl $16711680,%ebx + xorl %ebx,%eax + + xorl 2048(%rbp,%rcx,4),%eax + movl %eax,32(%rdi) + xorl 4(%rdi),%eax + movl %eax,36(%rdi) + xorl 8(%rdi),%eax + movl %eax,40(%rdi) + xorl 12(%rdi),%eax + movl %eax,44(%rdi) + + cmpl $6,%ecx + je .L14break + addl $1,%ecx + + movl %eax,%edx + movl 16(%rdi),%eax + movzbl %dl,%esi + movl 2(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $255,%ebx + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + shrl $16,%edx + andl $65280,%ebx + movzbl %dl,%esi + xorl %ebx,%eax + + movl 0(%rbp,%rsi,8),%ebx + movzbl %dh,%esi + andl $16711680,%ebx + xorl %ebx,%eax + + movl 2(%rbp,%rsi,8),%ebx + andl $4278190080,%ebx + xorl %ebx,%eax + + movl %eax,48(%rdi) + xorl 20(%rdi),%eax + movl %eax,52(%rdi) + xorl 24(%rdi),%eax + movl %eax,56(%rdi) + xorl 28(%rdi),%eax + movl %eax,60(%rdi) + + leaq 32(%rdi),%rdi + jmp .L14loop +.L14break: + movl $14,48(%rdi) + xorq %rax,%rax + jmp .Lexit + +.Lbadpointer: + movq $-1,%rax +.Lexit: +.byte 0xf3,0xc3 +.size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key +.globl AES_set_decrypt_key +.type AES_set_decrypt_key,@function +.align 16 +AES_set_decrypt_key: + pushq %rbx + pushq %rbp + pushq %rdx + + call _x86_64_AES_set_encrypt_key + movq (%rsp),%r8 + cmpl $0,%eax + jne .Labort + + movl 240(%r8),%ecx + xorq %rdi,%rdi + leaq (%rdi,%rcx,4),%rcx + 
movq %r8,%rsi + leaq (%r8,%rcx,4),%rdi +.align 4 +.Linvert: + movq 0(%rsi),%rax + movq 8(%rsi),%rbx + movq 0(%rdi),%rcx + movq 8(%rdi),%rdx + movq %rax,0(%rdi) + movq %rbx,8(%rdi) + movq %rcx,0(%rsi) + movq %rdx,8(%rsi) + leaq 16(%rsi),%rsi + leaq -16(%rdi),%rdi + cmpq %rsi,%rdi + jne .Linvert + + .long 0x10d8d4c,0x90000000 + leaq AES_Td-.(%r9),%rdi + leaq AES_Te-AES_Td(%rdi),%r9 + + movq %r8,%rsi + movl 240(%r8),%ecx + subl $1,%ecx +.align 4 +.Lpermute: + leaq 16(%rsi),%rsi + movl 0(%rsi),%eax + movl %eax,%edx + movzbl %ah,%ebx + shrl $16,%edx + andl $255,%eax + movzbq 2(%r9,%rax,8),%rax + movzbq 2(%r9,%rbx,8),%rbx + movl 0(%rdi,%rax,8),%eax + xorl 3(%rdi,%rbx,8),%eax + movzbl %dh,%ebx + andl $255,%edx + movzbq 2(%r9,%rdx,8),%rdx + movzbq 2(%r9,%rbx,8),%rbx + xorl 2(%rdi,%rdx,8),%eax + xorl 1(%rdi,%rbx,8),%eax + movl %eax,0(%rsi) + movl 4(%rsi),%eax + movl %eax,%edx + movzbl %ah,%ebx + shrl $16,%edx + andl $255,%eax + movzbq 2(%r9,%rax,8),%rax + movzbq 2(%r9,%rbx,8),%rbx + movl 0(%rdi,%rax,8),%eax + xorl 3(%rdi,%rbx,8),%eax + movzbl %dh,%ebx + andl $255,%edx + movzbq 2(%r9,%rdx,8),%rdx + movzbq 2(%r9,%rbx,8),%rbx + xorl 2(%rdi,%rdx,8),%eax + xorl 1(%rdi,%rbx,8),%eax + movl %eax,4(%rsi) + movl 8(%rsi),%eax + movl %eax,%edx + movzbl %ah,%ebx + shrl $16,%edx + andl $255,%eax + movzbq 2(%r9,%rax,8),%rax + movzbq 2(%r9,%rbx,8),%rbx + movl 0(%rdi,%rax,8),%eax + xorl 3(%rdi,%rbx,8),%eax + movzbl %dh,%ebx + andl $255,%edx + movzbq 2(%r9,%rdx,8),%rdx + movzbq 2(%r9,%rbx,8),%rbx + xorl 2(%rdi,%rdx,8),%eax + xorl 1(%rdi,%rbx,8),%eax + movl %eax,8(%rsi) + movl 12(%rsi),%eax + movl %eax,%edx + movzbl %ah,%ebx + shrl $16,%edx + andl $255,%eax + movzbq 2(%r9,%rax,8),%rax + movzbq 2(%r9,%rbx,8),%rbx + movl 0(%rdi,%rax,8),%eax + xorl 3(%rdi,%rbx,8),%eax + movzbl %dh,%ebx + andl $255,%edx + movzbq 2(%r9,%rdx,8),%rdx + movzbq 2(%r9,%rbx,8),%rbx + xorl 2(%rdi,%rdx,8),%eax + xorl 1(%rdi,%rbx,8),%eax + movl %eax,12(%rsi) + subl $1,%ecx + jnz .Lpermute + + xorq %rax,%rax +.Labort: + movq 8(%rsp),%rbp + movq 16(%rsp),%rbx + addq $24,%rsp + .byte 0xf3,0xc3 +.size AES_set_decrypt_key,.-AES_set_decrypt_key +.globl AES_cbc_encrypt +.type AES_cbc_encrypt,@function +.align 16 +AES_cbc_encrypt: + cmpq $0,%rdx + je .Lcbc_just_ret + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + pushfq + cld + movl %r9d,%r9d + + .long 0x1358d4c,0x90000000 +.Lcbc_pic_point: + + cmpq $0,%r9 + je .LDECRYPT + + leaq AES_Te-.Lcbc_pic_point(%r14),%r14 + + + leaq -64-248(%rsp),%r15 + andq $-64,%r15 + + + movq %r14,%r10 + leaq 2048(%r14),%r11 + movq %r15,%r12 + andq $4095,%r10 + andq $4095,%r11 + andq $4095,%r12 + + cmpq %r11,%r12 + jb .Lcbc_te_break_out + subq %r11,%r12 + subq %r12,%r15 + jmp .Lcbc_te_ok +.Lcbc_te_break_out: + subq %r10,%r12 + andq $4095,%r12 + addq $320,%r12 + subq %r12,%r15 +.align 4 +.Lcbc_te_ok: + + xchgq %rsp,%r15 + addq $8,%rsp + movq %r15,0(%rsp) + movq %rdx,8(%rsp) + movq %rcx,16(%rsp) + movq %r8,24(%rsp) + movl $0,56+240(%rsp) + movq %r8,%rbp + movq %rsi,%r9 + movq %rdi,%r8 + movq %rcx,%r15 + + + movq %r15,%r10 + subq %r14,%r10 + andq $4095,%r10 + cmpq $2048,%r10 + jb .Lcbc_do_ecopy + cmpq $4096-248,%r10 + jb .Lcbc_skip_ecopy +.align 4 +.Lcbc_do_ecopy: + movq %r15,%rsi + leaq 56(%rsp),%rdi + leaq 56(%rsp),%r15 + movl $30,%ecx +.long 0x90A548F3 + movl (%rsi),%eax + movl %eax,(%rdi) +.Lcbc_skip_ecopy: + movq %r15,32(%rsp) + + movl $16,%ecx +.align 4 +.Lcbc_prefetch_te: + movq 0(%r14),%r10 + movq 32(%r14),%r11 + movq 64(%r14),%r12 + movq 96(%r14),%r13 + leaq 128(%r14),%r14 + subl $1,%ecx + 
jnz .Lcbc_prefetch_te + subq $2048,%r14 + + testq $-16,%rdx + movq %rdx,%r10 + movl 0(%rbp),%eax + movl 4(%rbp),%ebx + movl 8(%rbp),%ecx + movl 12(%rbp),%edx + jz .Lcbc_enc_tail + +.align 4 +.Lcbc_enc_loop: + xorl 0(%r8),%eax + xorl 4(%r8),%ebx + xorl 8(%r8),%ecx + xorl 12(%r8),%edx + movq %r8,40(%rsp) + + movq 32(%rsp),%r15 + call _x86_64_AES_encrypt + + movq 40(%rsp),%r8 + movl %eax,0(%r9) + movl %ebx,4(%r9) + movl %ecx,8(%r9) + movl %edx,12(%r9) + + movq 8(%rsp),%r10 + leaq 16(%r8),%r8 + leaq 16(%r9),%r9 + subq $16,%r10 + testq $-16,%r10 + movq %r10,8(%rsp) + jnz .Lcbc_enc_loop + testq $15,%r10 + jnz .Lcbc_enc_tail + movq 24(%rsp),%rbp + movl %eax,0(%rbp) + movl %ebx,4(%rbp) + movl %ecx,8(%rbp) + movl %edx,12(%rbp) + +.align 4 +.Lcbc_cleanup: + cmpl $0,56+240(%rsp) + leaq 56(%rsp),%rdi + je .Lcbc_exit + movl $30,%ecx + xorq %rax,%rax +.long 0x90AB48F3 +.Lcbc_exit: + movq 0(%rsp),%rsp + popfq + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx +.Lcbc_just_ret: + .byte 0xf3,0xc3 +.align 4 +.Lcbc_enc_tail: + movq %rax,%r11 + movq %rcx,%r12 + movq %r10,%rcx + movq %r8,%rsi + movq %r9,%rdi +.long 0xF689A4F3 + movq $16,%rcx + subq %r10,%rcx + xorq %rax,%rax +.long 0xF689AAF3 + movq %r9,%r8 + movq $16,8(%rsp) + movq %r11,%rax + movq %r12,%rcx + jmp .Lcbc_enc_loop + +.align 16 +.LDECRYPT: + leaq AES_Td-.Lcbc_pic_point(%r14),%r14 + + + leaq -64-248(%rsp),%r15 + andq $-64,%r15 + + + movq %r14,%r10 + leaq 2304(%r14),%r11 + movq %r15,%r12 + andq $4095,%r10 + andq $4095,%r11 + andq $4095,%r12 + + cmpq %r11,%r12 + jb .Lcbc_td_break_out + subq %r11,%r12 + subq %r12,%r15 + jmp .Lcbc_td_ok +.Lcbc_td_break_out: + subq %r10,%r12 + andq $4095,%r12 + addq $320,%r12 + subq %r12,%r15 +.align 4 +.Lcbc_td_ok: + + xchgq %rsp,%r15 + addq $8,%rsp + movq %r15,0(%rsp) + movq %rdx,8(%rsp) + movq %rcx,16(%rsp) + movq %r8,24(%rsp) + movl $0,56+240(%rsp) + movq %r8,%rbp + movq %rsi,%r9 + movq %rdi,%r8 + movq %rcx,%r15 + + + movq %r15,%r10 + subq %r14,%r10 + andq $4095,%r10 + cmpq $2304,%r10 + jb .Lcbc_do_dcopy + cmpq $4096-248,%r10 + jb .Lcbc_skip_dcopy +.align 4 +.Lcbc_do_dcopy: + movq %r15,%rsi + leaq 56(%rsp),%rdi + leaq 56(%rsp),%r15 + movl $30,%ecx +.long 0x90A548F3 + movl (%rsi),%eax + movl %eax,(%rdi) +.Lcbc_skip_dcopy: + movq %r15,32(%rsp) + + movl $18,%ecx +.align 4 +.Lcbc_prefetch_td: + movq 0(%r14),%r10 + movq 32(%r14),%r11 + movq 64(%r14),%r12 + movq 96(%r14),%r13 + leaq 128(%r14),%r14 + subl $1,%ecx + jnz .Lcbc_prefetch_td + subq $2304,%r14 + + cmpq %r8,%r9 + je .Lcbc_dec_in_place + + movq %rbp,40(%rsp) +.align 4 +.Lcbc_dec_loop: + movl 0(%r8),%eax + movl 4(%r8),%ebx + movl 8(%r8),%ecx + movl 12(%r8),%edx + movq %r8,8+40(%rsp) + + movq 32(%rsp),%r15 + call _x86_64_AES_decrypt + + movq 40(%rsp),%rbp + movq 8+40(%rsp),%r8 + xorl 0(%rbp),%eax + xorl 4(%rbp),%ebx + xorl 8(%rbp),%ecx + xorl 12(%rbp),%edx + movq %r8,%rbp + + movq 8(%rsp),%r10 + subq $16,%r10 + jc .Lcbc_dec_partial + movq %r10,8(%rsp) + movq %rbp,40(%rsp) + + movl %eax,0(%r9) + movl %ebx,4(%r9) + movl %ecx,8(%r9) + movl %edx,12(%r9) + + leaq 16(%r8),%r8 + leaq 16(%r9),%r9 + jnz .Lcbc_dec_loop +.Lcbc_dec_end: + movq 24(%rsp),%r12 + movq 0(%rbp),%r10 + movq 8(%rbp),%r11 + movq %r10,0(%r12) + movq %r11,8(%r12) + jmp .Lcbc_cleanup + +.align 4 +.Lcbc_dec_partial: + movl %eax,0+40(%rsp) + movl %ebx,4+40(%rsp) + movl %ecx,8+40(%rsp) + movl %edx,12+40(%rsp) + movq %r9,%rdi + leaq 40(%rsp),%rsi + movq $16,%rcx + addq %r10,%rcx +.long 0xF689A4F3 + jmp .Lcbc_dec_end + +.align 16 +.Lcbc_dec_in_place: + movl 0(%r8),%eax + movl 4(%r8),%ebx + 
movl 8(%r8),%ecx + movl 12(%r8),%edx + + movq %r8,40(%rsp) + movq 32(%rsp),%r15 + call _x86_64_AES_decrypt + + movq 40(%rsp),%r8 + movq 24(%rsp),%rbp + xorl 0(%rbp),%eax + xorl 4(%rbp),%ebx + xorl 8(%rbp),%ecx + xorl 12(%rbp),%edx + + movq 0(%r8),%r10 + movq 8(%r8),%r11 + movq %r10,0(%rbp) + movq %r11,8(%rbp) + + movl %eax,0(%r9) + movl %ebx,4(%r9) + movl %ecx,8(%r9) + movl %edx,12(%r9) + + movq 8(%rsp),%rcx + leaq 16(%r8),%r8 + leaq 16(%r9),%r9 + subq $16,%rcx + jc .Lcbc_dec_in_place_partial + movq %rcx,8(%rsp) + jnz .Lcbc_dec_in_place + jmp .Lcbc_cleanup + +.align 4 +.Lcbc_dec_in_place_partial: + + leaq (%r9,%rcx,1),%rdi + leaq (%rbp,%rcx,1),%rsi + negq %rcx +.long 0xF689A4F3 + jmp .Lcbc_cleanup +.size AES_cbc_encrypt,.-AES_cbc_encrypt +.globl AES_Te +.align 64 +AES_Te: +.long 0xa56363c6,0xa56363c6 +.long 0x847c7cf8,0x847c7cf8 +.long 0x997777ee,0x997777ee +.long 0x8d7b7bf6,0x8d7b7bf6 +.long 0x0df2f2ff,0x0df2f2ff +.long 0xbd6b6bd6,0xbd6b6bd6 +.long 0xb16f6fde,0xb16f6fde +.long 0x54c5c591,0x54c5c591 +.long 0x50303060,0x50303060 +.long 0x03010102,0x03010102 +.long 0xa96767ce,0xa96767ce +.long 0x7d2b2b56,0x7d2b2b56 +.long 0x19fefee7,0x19fefee7 +.long 0x62d7d7b5,0x62d7d7b5 +.long 0xe6abab4d,0xe6abab4d +.long 0x9a7676ec,0x9a7676ec +.long 0x45caca8f,0x45caca8f +.long 0x9d82821f,0x9d82821f +.long 0x40c9c989,0x40c9c989 +.long 0x877d7dfa,0x877d7dfa +.long 0x15fafaef,0x15fafaef +.long 0xeb5959b2,0xeb5959b2 +.long 0xc947478e,0xc947478e +.long 0x0bf0f0fb,0x0bf0f0fb +.long 0xecadad41,0xecadad41 +.long 0x67d4d4b3,0x67d4d4b3 +.long 0xfda2a25f,0xfda2a25f +.long 0xeaafaf45,0xeaafaf45 +.long 0xbf9c9c23,0xbf9c9c23 +.long 0xf7a4a453,0xf7a4a453 +.long 0x967272e4,0x967272e4 +.long 0x5bc0c09b,0x5bc0c09b +.long 0xc2b7b775,0xc2b7b775 +.long 0x1cfdfde1,0x1cfdfde1 +.long 0xae93933d,0xae93933d +.long 0x6a26264c,0x6a26264c +.long 0x5a36366c,0x5a36366c +.long 0x413f3f7e,0x413f3f7e +.long 0x02f7f7f5,0x02f7f7f5 +.long 0x4fcccc83,0x4fcccc83 +.long 0x5c343468,0x5c343468 +.long 0xf4a5a551,0xf4a5a551 +.long 0x34e5e5d1,0x34e5e5d1 +.long 0x08f1f1f9,0x08f1f1f9 +.long 0x937171e2,0x937171e2 +.long 0x73d8d8ab,0x73d8d8ab +.long 0x53313162,0x53313162 +.long 0x3f15152a,0x3f15152a +.long 0x0c040408,0x0c040408 +.long 0x52c7c795,0x52c7c795 +.long 0x65232346,0x65232346 +.long 0x5ec3c39d,0x5ec3c39d +.long 0x28181830,0x28181830 +.long 0xa1969637,0xa1969637 +.long 0x0f05050a,0x0f05050a +.long 0xb59a9a2f,0xb59a9a2f +.long 0x0907070e,0x0907070e +.long 0x36121224,0x36121224 +.long 0x9b80801b,0x9b80801b +.long 0x3de2e2df,0x3de2e2df +.long 0x26ebebcd,0x26ebebcd +.long 0x6927274e,0x6927274e +.long 0xcdb2b27f,0xcdb2b27f +.long 0x9f7575ea,0x9f7575ea +.long 0x1b090912,0x1b090912 +.long 0x9e83831d,0x9e83831d +.long 0x742c2c58,0x742c2c58 +.long 0x2e1a1a34,0x2e1a1a34 +.long 0x2d1b1b36,0x2d1b1b36 +.long 0xb26e6edc,0xb26e6edc +.long 0xee5a5ab4,0xee5a5ab4 +.long 0xfba0a05b,0xfba0a05b +.long 0xf65252a4,0xf65252a4 +.long 0x4d3b3b76,0x4d3b3b76 +.long 0x61d6d6b7,0x61d6d6b7 +.long 0xceb3b37d,0xceb3b37d +.long 0x7b292952,0x7b292952 +.long 0x3ee3e3dd,0x3ee3e3dd +.long 0x712f2f5e,0x712f2f5e +.long 0x97848413,0x97848413 +.long 0xf55353a6,0xf55353a6 +.long 0x68d1d1b9,0x68d1d1b9 +.long 0x00000000,0x00000000 +.long 0x2cededc1,0x2cededc1 +.long 0x60202040,0x60202040 +.long 0x1ffcfce3,0x1ffcfce3 +.long 0xc8b1b179,0xc8b1b179 +.long 0xed5b5bb6,0xed5b5bb6 +.long 0xbe6a6ad4,0xbe6a6ad4 +.long 0x46cbcb8d,0x46cbcb8d +.long 0xd9bebe67,0xd9bebe67 +.long 0x4b393972,0x4b393972 +.long 0xde4a4a94,0xde4a4a94 +.long 0xd44c4c98,0xd44c4c98 +.long 0xe85858b0,0xe85858b0 +.long 
0x4acfcf85,0x4acfcf85 +.long 0x6bd0d0bb,0x6bd0d0bb +.long 0x2aefefc5,0x2aefefc5 +.long 0xe5aaaa4f,0xe5aaaa4f +.long 0x16fbfbed,0x16fbfbed +.long 0xc5434386,0xc5434386 +.long 0xd74d4d9a,0xd74d4d9a +.long 0x55333366,0x55333366 +.long 0x94858511,0x94858511 +.long 0xcf45458a,0xcf45458a +.long 0x10f9f9e9,0x10f9f9e9 +.long 0x06020204,0x06020204 +.long 0x817f7ffe,0x817f7ffe +.long 0xf05050a0,0xf05050a0 +.long 0x443c3c78,0x443c3c78 +.long 0xba9f9f25,0xba9f9f25 +.long 0xe3a8a84b,0xe3a8a84b +.long 0xf35151a2,0xf35151a2 +.long 0xfea3a35d,0xfea3a35d +.long 0xc0404080,0xc0404080 +.long 0x8a8f8f05,0x8a8f8f05 +.long 0xad92923f,0xad92923f +.long 0xbc9d9d21,0xbc9d9d21 +.long 0x48383870,0x48383870 +.long 0x04f5f5f1,0x04f5f5f1 +.long 0xdfbcbc63,0xdfbcbc63 +.long 0xc1b6b677,0xc1b6b677 +.long 0x75dadaaf,0x75dadaaf +.long 0x63212142,0x63212142 +.long 0x30101020,0x30101020 +.long 0x1affffe5,0x1affffe5 +.long 0x0ef3f3fd,0x0ef3f3fd +.long 0x6dd2d2bf,0x6dd2d2bf +.long 0x4ccdcd81,0x4ccdcd81 +.long 0x140c0c18,0x140c0c18 +.long 0x35131326,0x35131326 +.long 0x2fececc3,0x2fececc3 +.long 0xe15f5fbe,0xe15f5fbe +.long 0xa2979735,0xa2979735 +.long 0xcc444488,0xcc444488 +.long 0x3917172e,0x3917172e +.long 0x57c4c493,0x57c4c493 +.long 0xf2a7a755,0xf2a7a755 +.long 0x827e7efc,0x827e7efc +.long 0x473d3d7a,0x473d3d7a +.long 0xac6464c8,0xac6464c8 +.long 0xe75d5dba,0xe75d5dba +.long 0x2b191932,0x2b191932 +.long 0x957373e6,0x957373e6 +.long 0xa06060c0,0xa06060c0 +.long 0x98818119,0x98818119 +.long 0xd14f4f9e,0xd14f4f9e +.long 0x7fdcdca3,0x7fdcdca3 +.long 0x66222244,0x66222244 +.long 0x7e2a2a54,0x7e2a2a54 +.long 0xab90903b,0xab90903b +.long 0x8388880b,0x8388880b +.long 0xca46468c,0xca46468c +.long 0x29eeeec7,0x29eeeec7 +.long 0xd3b8b86b,0xd3b8b86b +.long 0x3c141428,0x3c141428 +.long 0x79dedea7,0x79dedea7 +.long 0xe25e5ebc,0xe25e5ebc +.long 0x1d0b0b16,0x1d0b0b16 +.long 0x76dbdbad,0x76dbdbad +.long 0x3be0e0db,0x3be0e0db +.long 0x56323264,0x56323264 +.long 0x4e3a3a74,0x4e3a3a74 +.long 0x1e0a0a14,0x1e0a0a14 +.long 0xdb494992,0xdb494992 +.long 0x0a06060c,0x0a06060c +.long 0x6c242448,0x6c242448 +.long 0xe45c5cb8,0xe45c5cb8 +.long 0x5dc2c29f,0x5dc2c29f +.long 0x6ed3d3bd,0x6ed3d3bd +.long 0xefacac43,0xefacac43 +.long 0xa66262c4,0xa66262c4 +.long 0xa8919139,0xa8919139 +.long 0xa4959531,0xa4959531 +.long 0x37e4e4d3,0x37e4e4d3 +.long 0x8b7979f2,0x8b7979f2 +.long 0x32e7e7d5,0x32e7e7d5 +.long 0x43c8c88b,0x43c8c88b +.long 0x5937376e,0x5937376e +.long 0xb76d6dda,0xb76d6dda +.long 0x8c8d8d01,0x8c8d8d01 +.long 0x64d5d5b1,0x64d5d5b1 +.long 0xd24e4e9c,0xd24e4e9c +.long 0xe0a9a949,0xe0a9a949 +.long 0xb46c6cd8,0xb46c6cd8 +.long 0xfa5656ac,0xfa5656ac +.long 0x07f4f4f3,0x07f4f4f3 +.long 0x25eaeacf,0x25eaeacf +.long 0xaf6565ca,0xaf6565ca +.long 0x8e7a7af4,0x8e7a7af4 +.long 0xe9aeae47,0xe9aeae47 +.long 0x18080810,0x18080810 +.long 0xd5baba6f,0xd5baba6f +.long 0x887878f0,0x887878f0 +.long 0x6f25254a,0x6f25254a +.long 0x722e2e5c,0x722e2e5c +.long 0x241c1c38,0x241c1c38 +.long 0xf1a6a657,0xf1a6a657 +.long 0xc7b4b473,0xc7b4b473 +.long 0x51c6c697,0x51c6c697 +.long 0x23e8e8cb,0x23e8e8cb +.long 0x7cdddda1,0x7cdddda1 +.long 0x9c7474e8,0x9c7474e8 +.long 0x211f1f3e,0x211f1f3e +.long 0xdd4b4b96,0xdd4b4b96 +.long 0xdcbdbd61,0xdcbdbd61 +.long 0x868b8b0d,0x868b8b0d +.long 0x858a8a0f,0x858a8a0f +.long 0x907070e0,0x907070e0 +.long 0x423e3e7c,0x423e3e7c +.long 0xc4b5b571,0xc4b5b571 +.long 0xaa6666cc,0xaa6666cc +.long 0xd8484890,0xd8484890 +.long 0x05030306,0x05030306 +.long 0x01f6f6f7,0x01f6f6f7 +.long 0x120e0e1c,0x120e0e1c +.long 0xa36161c2,0xa36161c2 +.long 
0x5f35356a,0x5f35356a +.long 0xf95757ae,0xf95757ae +.long 0xd0b9b969,0xd0b9b969 +.long 0x91868617,0x91868617 +.long 0x58c1c199,0x58c1c199 +.long 0x271d1d3a,0x271d1d3a +.long 0xb99e9e27,0xb99e9e27 +.long 0x38e1e1d9,0x38e1e1d9 +.long 0x13f8f8eb,0x13f8f8eb +.long 0xb398982b,0xb398982b +.long 0x33111122,0x33111122 +.long 0xbb6969d2,0xbb6969d2 +.long 0x70d9d9a9,0x70d9d9a9 +.long 0x898e8e07,0x898e8e07 +.long 0xa7949433,0xa7949433 +.long 0xb69b9b2d,0xb69b9b2d +.long 0x221e1e3c,0x221e1e3c +.long 0x92878715,0x92878715 +.long 0x20e9e9c9,0x20e9e9c9 +.long 0x49cece87,0x49cece87 +.long 0xff5555aa,0xff5555aa +.long 0x78282850,0x78282850 +.long 0x7adfdfa5,0x7adfdfa5 +.long 0x8f8c8c03,0x8f8c8c03 +.long 0xf8a1a159,0xf8a1a159 +.long 0x80898909,0x80898909 +.long 0x170d0d1a,0x170d0d1a +.long 0xdabfbf65,0xdabfbf65 +.long 0x31e6e6d7,0x31e6e6d7 +.long 0xc6424284,0xc6424284 +.long 0xb86868d0,0xb86868d0 +.long 0xc3414182,0xc3414182 +.long 0xb0999929,0xb0999929 +.long 0x772d2d5a,0x772d2d5a +.long 0x110f0f1e,0x110f0f1e +.long 0xcbb0b07b,0xcbb0b07b +.long 0xfc5454a8,0xfc5454a8 +.long 0xd6bbbb6d,0xd6bbbb6d +.long 0x3a16162c,0x3a16162c +.long 0x00000001, 0x00000002, 0x00000004, 0x00000008 +.long 0x00000010, 0x00000020, 0x00000040, 0x00000080 +.long 0x0000001b, 0x00000036, 0, 0, 0, 0, 0, 0 +.globl AES_Td +.align 64 +AES_Td: +.long 0x50a7f451,0x50a7f451 +.long 0x5365417e,0x5365417e +.long 0xc3a4171a,0xc3a4171a +.long 0x965e273a,0x965e273a +.long 0xcb6bab3b,0xcb6bab3b +.long 0xf1459d1f,0xf1459d1f +.long 0xab58faac,0xab58faac +.long 0x9303e34b,0x9303e34b +.long 0x55fa3020,0x55fa3020 +.long 0xf66d76ad,0xf66d76ad +.long 0x9176cc88,0x9176cc88 +.long 0x254c02f5,0x254c02f5 +.long 0xfcd7e54f,0xfcd7e54f +.long 0xd7cb2ac5,0xd7cb2ac5 +.long 0x80443526,0x80443526 +.long 0x8fa362b5,0x8fa362b5 +.long 0x495ab1de,0x495ab1de +.long 0x671bba25,0x671bba25 +.long 0x980eea45,0x980eea45 +.long 0xe1c0fe5d,0xe1c0fe5d +.long 0x02752fc3,0x02752fc3 +.long 0x12f04c81,0x12f04c81 +.long 0xa397468d,0xa397468d +.long 0xc6f9d36b,0xc6f9d36b +.long 0xe75f8f03,0xe75f8f03 +.long 0x959c9215,0x959c9215 +.long 0xeb7a6dbf,0xeb7a6dbf +.long 0xda595295,0xda595295 +.long 0x2d83bed4,0x2d83bed4 +.long 0xd3217458,0xd3217458 +.long 0x2969e049,0x2969e049 +.long 0x44c8c98e,0x44c8c98e +.long 0x6a89c275,0x6a89c275 +.long 0x78798ef4,0x78798ef4 +.long 0x6b3e5899,0x6b3e5899 +.long 0xdd71b927,0xdd71b927 +.long 0xb64fe1be,0xb64fe1be +.long 0x17ad88f0,0x17ad88f0 +.long 0x66ac20c9,0x66ac20c9 +.long 0xb43ace7d,0xb43ace7d +.long 0x184adf63,0x184adf63 +.long 0x82311ae5,0x82311ae5 +.long 0x60335197,0x60335197 +.long 0x457f5362,0x457f5362 +.long 0xe07764b1,0xe07764b1 +.long 0x84ae6bbb,0x84ae6bbb +.long 0x1ca081fe,0x1ca081fe +.long 0x942b08f9,0x942b08f9 +.long 0x58684870,0x58684870 +.long 0x19fd458f,0x19fd458f +.long 0x876cde94,0x876cde94 +.long 0xb7f87b52,0xb7f87b52 +.long 0x23d373ab,0x23d373ab +.long 0xe2024b72,0xe2024b72 +.long 0x578f1fe3,0x578f1fe3 +.long 0x2aab5566,0x2aab5566 +.long 0x0728ebb2,0x0728ebb2 +.long 0x03c2b52f,0x03c2b52f +.long 0x9a7bc586,0x9a7bc586 +.long 0xa50837d3,0xa50837d3 +.long 0xf2872830,0xf2872830 +.long 0xb2a5bf23,0xb2a5bf23 +.long 0xba6a0302,0xba6a0302 +.long 0x5c8216ed,0x5c8216ed +.long 0x2b1ccf8a,0x2b1ccf8a +.long 0x92b479a7,0x92b479a7 +.long 0xf0f207f3,0xf0f207f3 +.long 0xa1e2694e,0xa1e2694e +.long 0xcdf4da65,0xcdf4da65 +.long 0xd5be0506,0xd5be0506 +.long 0x1f6234d1,0x1f6234d1 +.long 0x8afea6c4,0x8afea6c4 +.long 0x9d532e34,0x9d532e34 +.long 0xa055f3a2,0xa055f3a2 +.long 0x32e18a05,0x32e18a05 +.long 0x75ebf6a4,0x75ebf6a4 +.long 0x39ec830b,0x39ec830b +.long 
0xaaef6040,0xaaef6040 +.long 0x069f715e,0x069f715e +.long 0x51106ebd,0x51106ebd +.long 0xf98a213e,0xf98a213e +.long 0x3d06dd96,0x3d06dd96 +.long 0xae053edd,0xae053edd +.long 0x46bde64d,0x46bde64d +.long 0xb58d5491,0xb58d5491 +.long 0x055dc471,0x055dc471 +.long 0x6fd40604,0x6fd40604 +.long 0xff155060,0xff155060 +.long 0x24fb9819,0x24fb9819 +.long 0x97e9bdd6,0x97e9bdd6 +.long 0xcc434089,0xcc434089 +.long 0x779ed967,0x779ed967 +.long 0xbd42e8b0,0xbd42e8b0 +.long 0x888b8907,0x888b8907 +.long 0x385b19e7,0x385b19e7 +.long 0xdbeec879,0xdbeec879 +.long 0x470a7ca1,0x470a7ca1 +.long 0xe90f427c,0xe90f427c +.long 0xc91e84f8,0xc91e84f8 +.long 0x00000000,0x00000000 +.long 0x83868009,0x83868009 +.long 0x48ed2b32,0x48ed2b32 +.long 0xac70111e,0xac70111e +.long 0x4e725a6c,0x4e725a6c +.long 0xfbff0efd,0xfbff0efd +.long 0x5638850f,0x5638850f +.long 0x1ed5ae3d,0x1ed5ae3d +.long 0x27392d36,0x27392d36 +.long 0x64d90f0a,0x64d90f0a +.long 0x21a65c68,0x21a65c68 +.long 0xd1545b9b,0xd1545b9b +.long 0x3a2e3624,0x3a2e3624 +.long 0xb1670a0c,0xb1670a0c +.long 0x0fe75793,0x0fe75793 +.long 0xd296eeb4,0xd296eeb4 +.long 0x9e919b1b,0x9e919b1b +.long 0x4fc5c080,0x4fc5c080 +.long 0xa220dc61,0xa220dc61 +.long 0x694b775a,0x694b775a +.long 0x161a121c,0x161a121c +.long 0x0aba93e2,0x0aba93e2 +.long 0xe52aa0c0,0xe52aa0c0 +.long 0x43e0223c,0x43e0223c +.long 0x1d171b12,0x1d171b12 +.long 0x0b0d090e,0x0b0d090e +.long 0xadc78bf2,0xadc78bf2 +.long 0xb9a8b62d,0xb9a8b62d +.long 0xc8a91e14,0xc8a91e14 +.long 0x8519f157,0x8519f157 +.long 0x4c0775af,0x4c0775af +.long 0xbbdd99ee,0xbbdd99ee +.long 0xfd607fa3,0xfd607fa3 +.long 0x9f2601f7,0x9f2601f7 +.long 0xbcf5725c,0xbcf5725c +.long 0xc53b6644,0xc53b6644 +.long 0x347efb5b,0x347efb5b +.long 0x7629438b,0x7629438b +.long 0xdcc623cb,0xdcc623cb +.long 0x68fcedb6,0x68fcedb6 +.long 0x63f1e4b8,0x63f1e4b8 +.long 0xcadc31d7,0xcadc31d7 +.long 0x10856342,0x10856342 +.long 0x40229713,0x40229713 +.long 0x2011c684,0x2011c684 +.long 0x7d244a85,0x7d244a85 +.long 0xf83dbbd2,0xf83dbbd2 +.long 0x1132f9ae,0x1132f9ae +.long 0x6da129c7,0x6da129c7 +.long 0x4b2f9e1d,0x4b2f9e1d +.long 0xf330b2dc,0xf330b2dc +.long 0xec52860d,0xec52860d +.long 0xd0e3c177,0xd0e3c177 +.long 0x6c16b32b,0x6c16b32b +.long 0x99b970a9,0x99b970a9 +.long 0xfa489411,0xfa489411 +.long 0x2264e947,0x2264e947 +.long 0xc48cfca8,0xc48cfca8 +.long 0x1a3ff0a0,0x1a3ff0a0 +.long 0xd82c7d56,0xd82c7d56 +.long 0xef903322,0xef903322 +.long 0xc74e4987,0xc74e4987 +.long 0xc1d138d9,0xc1d138d9 +.long 0xfea2ca8c,0xfea2ca8c +.long 0x360bd498,0x360bd498 +.long 0xcf81f5a6,0xcf81f5a6 +.long 0x28de7aa5,0x28de7aa5 +.long 0x268eb7da,0x268eb7da +.long 0xa4bfad3f,0xa4bfad3f +.long 0xe49d3a2c,0xe49d3a2c +.long 0x0d927850,0x0d927850 +.long 0x9bcc5f6a,0x9bcc5f6a +.long 0x62467e54,0x62467e54 +.long 0xc2138df6,0xc2138df6 +.long 0xe8b8d890,0xe8b8d890 +.long 0x5ef7392e,0x5ef7392e +.long 0xf5afc382,0xf5afc382 +.long 0xbe805d9f,0xbe805d9f +.long 0x7c93d069,0x7c93d069 +.long 0xa92dd56f,0xa92dd56f +.long 0xb31225cf,0xb31225cf +.long 0x3b99acc8,0x3b99acc8 +.long 0xa77d1810,0xa77d1810 +.long 0x6e639ce8,0x6e639ce8 +.long 0x7bbb3bdb,0x7bbb3bdb +.long 0x097826cd,0x097826cd +.long 0xf418596e,0xf418596e +.long 0x01b79aec,0x01b79aec +.long 0xa89a4f83,0xa89a4f83 +.long 0x656e95e6,0x656e95e6 +.long 0x7ee6ffaa,0x7ee6ffaa +.long 0x08cfbc21,0x08cfbc21 +.long 0xe6e815ef,0xe6e815ef +.long 0xd99be7ba,0xd99be7ba +.long 0xce366f4a,0xce366f4a +.long 0xd4099fea,0xd4099fea +.long 0xd67cb029,0xd67cb029 +.long 0xafb2a431,0xafb2a431 +.long 0x31233f2a,0x31233f2a +.long 0x3094a5c6,0x3094a5c6 +.long 
0xc066a235,0xc066a235 +.long 0x37bc4e74,0x37bc4e74 +.long 0xa6ca82fc,0xa6ca82fc +.long 0xb0d090e0,0xb0d090e0 +.long 0x15d8a733,0x15d8a733 +.long 0x4a9804f1,0x4a9804f1 +.long 0xf7daec41,0xf7daec41 +.long 0x0e50cd7f,0x0e50cd7f +.long 0x2ff69117,0x2ff69117 +.long 0x8dd64d76,0x8dd64d76 +.long 0x4db0ef43,0x4db0ef43 +.long 0x544daacc,0x544daacc +.long 0xdf0496e4,0xdf0496e4 +.long 0xe3b5d19e,0xe3b5d19e +.long 0x1b886a4c,0x1b886a4c +.long 0xb81f2cc1,0xb81f2cc1 +.long 0x7f516546,0x7f516546 +.long 0x04ea5e9d,0x04ea5e9d +.long 0x5d358c01,0x5d358c01 +.long 0x737487fa,0x737487fa +.long 0x2e410bfb,0x2e410bfb +.long 0x5a1d67b3,0x5a1d67b3 +.long 0x52d2db92,0x52d2db92 +.long 0x335610e9,0x335610e9 +.long 0x1347d66d,0x1347d66d +.long 0x8c61d79a,0x8c61d79a +.long 0x7a0ca137,0x7a0ca137 +.long 0x8e14f859,0x8e14f859 +.long 0x893c13eb,0x893c13eb +.long 0xee27a9ce,0xee27a9ce +.long 0x35c961b7,0x35c961b7 +.long 0xede51ce1,0xede51ce1 +.long 0x3cb1477a,0x3cb1477a +.long 0x59dfd29c,0x59dfd29c +.long 0x3f73f255,0x3f73f255 +.long 0x79ce1418,0x79ce1418 +.long 0xbf37c773,0xbf37c773 +.long 0xeacdf753,0xeacdf753 +.long 0x5baafd5f,0x5baafd5f +.long 0x146f3ddf,0x146f3ddf +.long 0x86db4478,0x86db4478 +.long 0x81f3afca,0x81f3afca +.long 0x3ec468b9,0x3ec468b9 +.long 0x2c342438,0x2c342438 +.long 0x5f40a3c2,0x5f40a3c2 +.long 0x72c31d16,0x72c31d16 +.long 0x0c25e2bc,0x0c25e2bc +.long 0x8b493c28,0x8b493c28 +.long 0x41950dff,0x41950dff +.long 0x7101a839,0x7101a839 +.long 0xdeb30c08,0xdeb30c08 +.long 0x9ce4b4d8,0x9ce4b4d8 +.long 0x90c15664,0x90c15664 +.long 0x6184cb7b,0x6184cb7b +.long 0x70b632d5,0x70b632d5 +.long 0x745c6c48,0x745c6c48 +.long 0x4257b8d0,0x4257b8d0 +.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38 +.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb +.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87 +.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb +.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d +.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e +.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2 +.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25 +.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16 +.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92 +.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda +.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84 +.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a +.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06 +.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02 +.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b +.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea +.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73 +.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85 +.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e +.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89 +.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b +.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20 +.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4 +.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31 +.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f +.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d +.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef +.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0 +.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61 +.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26 +.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d Property changes on: secure/lib/libcrypto/amd64/aes-x86_64.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/rc4-x86_64.S =================================================================== --- secure/lib/libcrypto/amd64/rc4-x86_64.S (revision 0) +++ 
secure/lib/libcrypto/amd64/rc4-x86_64.S (revision 0) @@ -0,0 +1,416 @@ + # $FreeBSD$ +.text + +.globl RC4 +.type RC4,@function +.align 16 +RC4: orq %rsi,%rsi + jne .Lentry + .byte 0xf3,0xc3 +.Lentry: + pushq %r12 + pushq %r13 + + addq $8,%rdi + movl -8(%rdi),%r8d + movl -4(%rdi),%r12d + cmpl $-1,256(%rdi) + je .LRC4_CHAR + incb %r8b + movl (%rdi,%r8,4),%r9d + testq $-8,%rsi + jz .Lloop1 + jmp .Lloop8 +.align 16 +.Lloop8: + addb %r9b,%r12b + movq %r8,%r10 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r10b + movl (%rdi,%r10,4),%r11d + cmpq %r10,%r12 + movl %r9d,(%rdi,%r12,4) + cmoveq %r9,%r11 + movl %r13d,(%rdi,%r8,4) + addb %r9b,%r13b + movb (%rdi,%r13,4),%al + addb %r11b,%r12b + movq %r10,%r8 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r8b + movl (%rdi,%r8,4),%r9d + cmpq %r8,%r12 + movl %r11d,(%rdi,%r12,4) + cmoveq %r11,%r9 + movl %r13d,(%rdi,%r10,4) + addb %r11b,%r13b + movb (%rdi,%r13,4),%al + addb %r9b,%r12b + movq %r8,%r10 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r10b + movl (%rdi,%r10,4),%r11d + cmpq %r10,%r12 + movl %r9d,(%rdi,%r12,4) + cmoveq %r9,%r11 + movl %r13d,(%rdi,%r8,4) + addb %r9b,%r13b + movb (%rdi,%r13,4),%al + addb %r11b,%r12b + movq %r10,%r8 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r8b + movl (%rdi,%r8,4),%r9d + cmpq %r8,%r12 + movl %r11d,(%rdi,%r12,4) + cmoveq %r11,%r9 + movl %r13d,(%rdi,%r10,4) + addb %r11b,%r13b + movb (%rdi,%r13,4),%al + addb %r9b,%r12b + movq %r8,%r10 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r10b + movl (%rdi,%r10,4),%r11d + cmpq %r10,%r12 + movl %r9d,(%rdi,%r12,4) + cmoveq %r9,%r11 + movl %r13d,(%rdi,%r8,4) + addb %r9b,%r13b + movb (%rdi,%r13,4),%al + addb %r11b,%r12b + movq %r10,%r8 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r8b + movl (%rdi,%r8,4),%r9d + cmpq %r8,%r12 + movl %r11d,(%rdi,%r12,4) + cmoveq %r11,%r9 + movl %r13d,(%rdi,%r10,4) + addb %r11b,%r13b + movb (%rdi,%r13,4),%al + addb %r9b,%r12b + movq %r8,%r10 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r10b + movl (%rdi,%r10,4),%r11d + cmpq %r10,%r12 + movl %r9d,(%rdi,%r12,4) + cmoveq %r9,%r11 + movl %r13d,(%rdi,%r8,4) + addb %r9b,%r13b + movb (%rdi,%r13,4),%al + addb %r11b,%r12b + movq %r10,%r8 + movl (%rdi,%r12,4),%r13d + rorq $8,%rax + incb %r8b + movl (%rdi,%r8,4),%r9d + cmpq %r8,%r12 + movl %r11d,(%rdi,%r12,4) + cmoveq %r11,%r9 + movl %r13d,(%rdi,%r10,4) + addb %r11b,%r13b + movb (%rdi,%r13,4),%al + rorq $8,%rax + subq $8,%rsi + + xorq (%rdx),%rax + addq $8,%rdx + movq %rax,(%rcx) + addq $8,%rcx + + testq $-8,%rsi + jnz .Lloop8 + cmpq $0,%rsi + jne .Lloop1 +.Lexit: + subb $1,%r8b + movl %r8d,-8(%rdi) + movl %r12d,-4(%rdi) + + popq %r13 + popq %r12 + .byte 0xf3,0xc3 +.align 16 +.Lloop1: + addb %r9b,%r12b + movl (%rdi,%r12,4),%r13d + movl %r9d,(%rdi,%r12,4) + movl %r13d,(%rdi,%r8,4) + addb %r13b,%r9b + incb %r8b + movl (%rdi,%r9,4),%r13d + movl (%rdi,%r8,4),%r9d + xorb (%rdx),%r13b + incq %rdx + movb %r13b,(%rcx) + incq %rcx + decq %rsi + jnz .Lloop1 + jmp .Lexit + +.align 16 +.LRC4_CHAR: + addb $1,%r8b + movzbl (%rdi,%r8,1),%r9d + testq $-8,%rsi + jz .Lcloop1 + cmp $0,260(%rdi) + jnz .Lcloop1 + pushq %rbx + jmp .Lcloop8 +.align 16 +.Lcloop8: + movl (%rdx),%eax + movl 4(%rdx),%ebx + addb %r9b,%r12b + leaq 1(%r8),%r10 + movzbl (%rdi,%r12,1),%r13d + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%r11d + movb %r9b,(%rdi,%r12,1) + cmpq %r10,%r12 + movb %r13b,(%rdi,%r8,1) + jne .Lcmov0 + movq %r9,%r11 +.Lcmov0: + addb %r9b,%r13b + xorb (%rdi,%r13,1),%al + rorl $8,%eax + addb %r11b,%r12b + leaq 1(%r10),%r8 + movzbl (%rdi,%r12,1),%r13d + movzbl 
%r8b,%r8d + movzbl (%rdi,%r8,1),%r9d + movb %r11b,(%rdi,%r12,1) + cmpq %r8,%r12 + movb %r13b,(%rdi,%r10,1) + jne .Lcmov1 + movq %r11,%r9 +.Lcmov1: + addb %r11b,%r13b + xorb (%rdi,%r13,1),%al + rorl $8,%eax + addb %r9b,%r12b + leaq 1(%r8),%r10 + movzbl (%rdi,%r12,1),%r13d + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%r11d + movb %r9b,(%rdi,%r12,1) + cmpq %r10,%r12 + movb %r13b,(%rdi,%r8,1) + jne .Lcmov2 + movq %r9,%r11 +.Lcmov2: + addb %r9b,%r13b + xorb (%rdi,%r13,1),%al + rorl $8,%eax + addb %r11b,%r12b + leaq 1(%r10),%r8 + movzbl (%rdi,%r12,1),%r13d + movzbl %r8b,%r8d + movzbl (%rdi,%r8,1),%r9d + movb %r11b,(%rdi,%r12,1) + cmpq %r8,%r12 + movb %r13b,(%rdi,%r10,1) + jne .Lcmov3 + movq %r11,%r9 +.Lcmov3: + addb %r11b,%r13b + xorb (%rdi,%r13,1),%al + rorl $8,%eax + addb %r9b,%r12b + leaq 1(%r8),%r10 + movzbl (%rdi,%r12,1),%r13d + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%r11d + movb %r9b,(%rdi,%r12,1) + cmpq %r10,%r12 + movb %r13b,(%rdi,%r8,1) + jne .Lcmov4 + movq %r9,%r11 +.Lcmov4: + addb %r9b,%r13b + xorb (%rdi,%r13,1),%bl + rorl $8,%ebx + addb %r11b,%r12b + leaq 1(%r10),%r8 + movzbl (%rdi,%r12,1),%r13d + movzbl %r8b,%r8d + movzbl (%rdi,%r8,1),%r9d + movb %r11b,(%rdi,%r12,1) + cmpq %r8,%r12 + movb %r13b,(%rdi,%r10,1) + jne .Lcmov5 + movq %r11,%r9 +.Lcmov5: + addb %r11b,%r13b + xorb (%rdi,%r13,1),%bl + rorl $8,%ebx + addb %r9b,%r12b + leaq 1(%r8),%r10 + movzbl (%rdi,%r12,1),%r13d + movzbl %r10b,%r10d + movzbl (%rdi,%r10,1),%r11d + movb %r9b,(%rdi,%r12,1) + cmpq %r10,%r12 + movb %r13b,(%rdi,%r8,1) + jne .Lcmov6 + movq %r9,%r11 +.Lcmov6: + addb %r9b,%r13b + xorb (%rdi,%r13,1),%bl + rorl $8,%ebx + addb %r11b,%r12b + leaq 1(%r10),%r8 + movzbl (%rdi,%r12,1),%r13d + movzbl %r8b,%r8d + movzbl (%rdi,%r8,1),%r9d + movb %r11b,(%rdi,%r12,1) + cmpq %r8,%r12 + movb %r13b,(%rdi,%r10,1) + jne .Lcmov7 + movq %r11,%r9 +.Lcmov7: + addb %r11b,%r13b + xorb (%rdi,%r13,1),%bl + rorl $8,%ebx + leaq -8(%rsi),%rsi + movl %eax,(%rcx) + leaq 8(%rdx),%rdx + movl %ebx,4(%rcx) + leaq 8(%rcx),%rcx + + testq $-8,%rsi + jnz .Lcloop8 + popq %rbx + cmpq $0,%rsi + jne .Lcloop1 + jmp .Lexit +.align 16 +.Lcloop1: + addb %r9b,%r12b + movzbl (%rdi,%r12,1),%r13d + movb %r9b,(%rdi,%r12,1) + movb %r13b,(%rdi,%r8,1) + addb %r9b,%r13b + addb $1,%r8b + movzbl %r13b,%r13d + movzbl %r8b,%r8d + movzbl (%rdi,%r13,1),%r13d + movzbl (%rdi,%r8,1),%r9d + xorb (%rdx),%r13b + leaq 1(%rdx),%rdx + movb %r13b,(%rcx) + leaq 1(%rcx),%rcx + subq $1,%rsi + jnz .Lcloop1 + jmp .Lexit +.size RC4,.-RC4 + +.globl RC4_set_key +.type RC4_set_key,@function +.align 16 +RC4_set_key: + leaq 8(%rdi),%rdi + leaq (%rdx,%rsi,1),%rdx + negq %rsi + movq %rsi,%rcx + xorl %eax,%eax + xorq %r9,%r9 + xorq %r10,%r10 + xorq %r11,%r11 + + movl OPENSSL_ia32cap_P(%rip),%r8d + btl $20,%r8d + jnc .Lw1stloop + btl $30,%r8d + setc %r9b + movl %r9d,260(%rdi) + jmp .Lc1stloop + +.align 16 +.Lw1stloop: + movl %eax,(%rdi,%rax,4) + addb $1,%al + jnc .Lw1stloop + + xorq %r9,%r9 + xorq %r8,%r8 +.align 16 +.Lw2ndloop: + movl (%rdi,%r9,4),%r10d + addb (%rdx,%rsi,1),%r8b + addb %r10b,%r8b + addq $1,%rsi + movl (%rdi,%r8,4),%r11d + cmovzq %rcx,%rsi + movl %r10d,(%rdi,%r8,4) + movl %r11d,(%rdi,%r9,4) + addb $1,%r9b + jnc .Lw2ndloop + jmp .Lexit_key + +.align 16 +.Lc1stloop: + movb %al,(%rdi,%rax,1) + addb $1,%al + jnc .Lc1stloop + + xorq %r9,%r9 + xorq %r8,%r8 +.align 16 +.Lc2ndloop: + movb (%rdi,%r9,1),%r10b + addb (%rdx,%rsi,1),%r8b + addb %r10b,%r8b + addq $1,%rsi + movb (%rdi,%r8,1),%r11b + jnz .Lcnowrap + movq %rcx,%rsi +.Lcnowrap: + movb %r10b,(%rdi,%r8,1) + movb %r11b,(%rdi,%r9,1) + 
addb $1,%r9b + jnc .Lc2ndloop + movl $-1,256(%rdi) + +.align 16 +.Lexit_key: + xorl %eax,%eax + movl %eax,-8(%rdi) + movl %eax,-4(%rdi) + .byte 0xf3,0xc3 +.size RC4_set_key,.-RC4_set_key + +.globl RC4_options +.type RC4_options,@function +.align 16 +RC4_options: + .long 0x1058d48,0x90000000 + leaq .Lopts-.(%rax),%rax + movl OPENSSL_ia32cap_P(%rip),%edx + btl $20,%edx + jnc .Ldone + addq $12,%rax + btl $30,%edx + jnc .Ldone + addq $13,%rax +.Ldone: + .byte 0xf3,0xc3 +.align 64 +.Lopts: +.byte 114,99,52,40,56,120,44,105,110,116,41,0 +.byte 114,99,52,40,56,120,44,99,104,97,114,41,0 +.byte 114,99,52,40,49,120,44,99,104,97,114,41,0 +.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.align 64 +.size RC4_options,.-RC4_options Property changes on: secure/lib/libcrypto/amd64/rc4-x86_64.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/sha256-x86_64.S =================================================================== --- secure/lib/libcrypto/amd64/sha256-x86_64.S (revision 0) +++ secure/lib/libcrypto/amd64/sha256-x86_64.S (revision 0) @@ -0,0 +1,1970 @@ + # $FreeBSD$ +.text + +.globl sha256_block_data_order +.type sha256_block_data_order,@function +.align 16 +sha256_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rsp,%rbp + shlq $4,%rdx + subq $64+32,%rsp + leaq (%rsi,%rdx,4),%rdx + andq $-64,%rsp + movq %rdi,64+0(%rsp) + movq %rsi,64+8(%rsp) + movq %rdx,64+16(%rsp) + movq %rbp,64+24(%rsp) + + .long 0x12d8d48,0x90000000 + leaq K256-.(%rbp),%rbp + + movl 0(%rdi),%eax + movl 4(%rdi),%ebx + movl 8(%rdi),%ecx + movl 12(%rdi),%edx + movl 16(%rdi),%r8d + movl 20(%rdi),%r9d + movl 24(%rdi),%r10d + movl 28(%rdi),%r11d + jmp .Lloop + +.align 16 +.Lloop: + xorq %rdi,%rdi + movl 0(%rsi),%r12d + bswapl %r12d + movl %r8d,%r13d + movl %r8d,%r14d + movl %r9d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r10d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r8d,%r15d + movl %r12d,0(%rsp) + + xorl %r14d,%r13d + xorl %r10d,%r15d + addl %r11d,%r12d + + movl %eax,%r11d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %eax,%r13d + movl %eax,%r14d + + rorl $2,%r11d + rorl $13,%r13d + movl %eax,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r11d + rorl $9,%r13d + orl %ecx,%r14d + + xorl %r13d,%r11d + andl %ecx,%r15d + addl %r12d,%edx + + andl %ebx,%r14d + addl %r12d,%r11d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r11d + movl 4(%rsi),%r12d + bswapl %r12d + movl %edx,%r13d + movl %edx,%r14d + movl %r8d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r9d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %edx,%r15d + movl %r12d,4(%rsp) + + xorl %r14d,%r13d + xorl %r9d,%r15d + addl %r10d,%r12d + + movl %r11d,%r10d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r11d,%r13d + movl %r11d,%r14d + + rorl $2,%r10d + rorl $13,%r13d + movl %r11d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r10d + rorl $9,%r13d + orl %ebx,%r14d + + xorl %r13d,%r10d + andl %ebx,%r15d + addl %r12d,%ecx + + andl %eax,%r14d + addl %r12d,%r10d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r10d + movl 8(%rsi),%r12d + bswapl %r12d + movl %ecx,%r13d + movl %ecx,%r14d + movl %edx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r8d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ecx,%r15d + movl 
%r12d,8(%rsp) + + xorl %r14d,%r13d + xorl %r8d,%r15d + addl %r9d,%r12d + + movl %r10d,%r9d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r10d,%r13d + movl %r10d,%r14d + + rorl $2,%r9d + rorl $13,%r13d + movl %r10d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r9d + rorl $9,%r13d + orl %eax,%r14d + + xorl %r13d,%r9d + andl %eax,%r15d + addl %r12d,%ebx + + andl %r11d,%r14d + addl %r12d,%r9d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r9d + movl 12(%rsi),%r12d + bswapl %r12d + movl %ebx,%r13d + movl %ebx,%r14d + movl %ecx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %edx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ebx,%r15d + movl %r12d,12(%rsp) + + xorl %r14d,%r13d + xorl %edx,%r15d + addl %r8d,%r12d + + movl %r9d,%r8d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r9d,%r13d + movl %r9d,%r14d + + rorl $2,%r8d + rorl $13,%r13d + movl %r9d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r8d + rorl $9,%r13d + orl %r11d,%r14d + + xorl %r13d,%r8d + andl %r11d,%r15d + addl %r12d,%eax + + andl %r10d,%r14d + addl %r12d,%r8d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r8d + movl 16(%rsi),%r12d + bswapl %r12d + movl %eax,%r13d + movl %eax,%r14d + movl %ebx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ecx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %eax,%r15d + movl %r12d,16(%rsp) + + xorl %r14d,%r13d + xorl %ecx,%r15d + addl %edx,%r12d + + movl %r8d,%edx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r8d,%r13d + movl %r8d,%r14d + + rorl $2,%edx + rorl $13,%r13d + movl %r8d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%edx + rorl $9,%r13d + orl %r10d,%r14d + + xorl %r13d,%edx + andl %r10d,%r15d + addl %r12d,%r11d + + andl %r9d,%r14d + addl %r12d,%edx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%edx + movl 20(%rsi),%r12d + bswapl %r12d + movl %r11d,%r13d + movl %r11d,%r14d + movl %eax,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ebx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r11d,%r15d + movl %r12d,20(%rsp) + + xorl %r14d,%r13d + xorl %ebx,%r15d + addl %ecx,%r12d + + movl %edx,%ecx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %edx,%r13d + movl %edx,%r14d + + rorl $2,%ecx + rorl $13,%r13d + movl %edx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ecx + rorl $9,%r13d + orl %r9d,%r14d + + xorl %r13d,%ecx + andl %r9d,%r15d + addl %r12d,%r10d + + andl %r8d,%r14d + addl %r12d,%ecx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ecx + movl 24(%rsi),%r12d + bswapl %r12d + movl %r10d,%r13d + movl %r10d,%r14d + movl %r11d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %eax,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r10d,%r15d + movl %r12d,24(%rsp) + + xorl %r14d,%r13d + xorl %eax,%r15d + addl %ebx,%r12d + + movl %ecx,%ebx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ecx,%r13d + movl %ecx,%r14d + + rorl $2,%ebx + rorl $13,%r13d + movl %ecx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ebx + rorl $9,%r13d + orl %r8d,%r14d + + xorl %r13d,%ebx + andl %r8d,%r15d + addl %r12d,%r9d + + andl %edx,%r14d + addl %r12d,%ebx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ebx + movl 28(%rsi),%r12d + bswapl %r12d + movl %r9d,%r13d + movl %r9d,%r14d + movl %r10d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r11d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r9d,%r15d + movl %r12d,28(%rsp) + + xorl %r14d,%r13d + xorl %r11d,%r15d + addl %eax,%r12d + + movl %ebx,%eax + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ebx,%r13d + movl %ebx,%r14d + + rorl $2,%eax + rorl $13,%r13d + movl %ebx,%r15d + addl 
(%rbp,%rdi,4),%r12d + + xorl %r13d,%eax + rorl $9,%r13d + orl %edx,%r14d + + xorl %r13d,%eax + andl %edx,%r15d + addl %r12d,%r8d + + andl %ecx,%r14d + addl %r12d,%eax + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%eax + movl 32(%rsi),%r12d + bswapl %r12d + movl %r8d,%r13d + movl %r8d,%r14d + movl %r9d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r10d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r8d,%r15d + movl %r12d,32(%rsp) + + xorl %r14d,%r13d + xorl %r10d,%r15d + addl %r11d,%r12d + + movl %eax,%r11d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %eax,%r13d + movl %eax,%r14d + + rorl $2,%r11d + rorl $13,%r13d + movl %eax,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r11d + rorl $9,%r13d + orl %ecx,%r14d + + xorl %r13d,%r11d + andl %ecx,%r15d + addl %r12d,%edx + + andl %ebx,%r14d + addl %r12d,%r11d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r11d + movl 36(%rsi),%r12d + bswapl %r12d + movl %edx,%r13d + movl %edx,%r14d + movl %r8d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r9d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %edx,%r15d + movl %r12d,36(%rsp) + + xorl %r14d,%r13d + xorl %r9d,%r15d + addl %r10d,%r12d + + movl %r11d,%r10d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r11d,%r13d + movl %r11d,%r14d + + rorl $2,%r10d + rorl $13,%r13d + movl %r11d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r10d + rorl $9,%r13d + orl %ebx,%r14d + + xorl %r13d,%r10d + andl %ebx,%r15d + addl %r12d,%ecx + + andl %eax,%r14d + addl %r12d,%r10d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r10d + movl 40(%rsi),%r12d + bswapl %r12d + movl %ecx,%r13d + movl %ecx,%r14d + movl %edx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r8d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ecx,%r15d + movl %r12d,40(%rsp) + + xorl %r14d,%r13d + xorl %r8d,%r15d + addl %r9d,%r12d + + movl %r10d,%r9d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r10d,%r13d + movl %r10d,%r14d + + rorl $2,%r9d + rorl $13,%r13d + movl %r10d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r9d + rorl $9,%r13d + orl %eax,%r14d + + xorl %r13d,%r9d + andl %eax,%r15d + addl %r12d,%ebx + + andl %r11d,%r14d + addl %r12d,%r9d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r9d + movl 44(%rsi),%r12d + bswapl %r12d + movl %ebx,%r13d + movl %ebx,%r14d + movl %ecx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %edx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ebx,%r15d + movl %r12d,44(%rsp) + + xorl %r14d,%r13d + xorl %edx,%r15d + addl %r8d,%r12d + + movl %r9d,%r8d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r9d,%r13d + movl %r9d,%r14d + + rorl $2,%r8d + rorl $13,%r13d + movl %r9d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r8d + rorl $9,%r13d + orl %r11d,%r14d + + xorl %r13d,%r8d + andl %r11d,%r15d + addl %r12d,%eax + + andl %r10d,%r14d + addl %r12d,%r8d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r8d + movl 48(%rsi),%r12d + bswapl %r12d + movl %eax,%r13d + movl %eax,%r14d + movl %ebx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ecx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %eax,%r15d + movl %r12d,48(%rsp) + + xorl %r14d,%r13d + xorl %ecx,%r15d + addl %edx,%r12d + + movl %r8d,%edx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r8d,%r13d + movl %r8d,%r14d + + rorl $2,%edx + rorl $13,%r13d + movl %r8d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%edx + rorl $9,%r13d + orl %r10d,%r14d + + xorl %r13d,%edx + andl %r10d,%r15d + addl %r12d,%r11d + + andl %r9d,%r14d + addl %r12d,%edx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl 
%r14d,%edx + movl 52(%rsi),%r12d + bswapl %r12d + movl %r11d,%r13d + movl %r11d,%r14d + movl %eax,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ebx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r11d,%r15d + movl %r12d,52(%rsp) + + xorl %r14d,%r13d + xorl %ebx,%r15d + addl %ecx,%r12d + + movl %edx,%ecx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %edx,%r13d + movl %edx,%r14d + + rorl $2,%ecx + rorl $13,%r13d + movl %edx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ecx + rorl $9,%r13d + orl %r9d,%r14d + + xorl %r13d,%ecx + andl %r9d,%r15d + addl %r12d,%r10d + + andl %r8d,%r14d + addl %r12d,%ecx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ecx + movl 56(%rsi),%r12d + bswapl %r12d + movl %r10d,%r13d + movl %r10d,%r14d + movl %r11d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %eax,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r10d,%r15d + movl %r12d,56(%rsp) + + xorl %r14d,%r13d + xorl %eax,%r15d + addl %ebx,%r12d + + movl %ecx,%ebx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ecx,%r13d + movl %ecx,%r14d + + rorl $2,%ebx + rorl $13,%r13d + movl %ecx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ebx + rorl $9,%r13d + orl %r8d,%r14d + + xorl %r13d,%ebx + andl %r8d,%r15d + addl %r12d,%r9d + + andl %edx,%r14d + addl %r12d,%ebx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ebx + movl 60(%rsi),%r12d + bswapl %r12d + movl %r9d,%r13d + movl %r9d,%r14d + movl %r10d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r11d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r9d,%r15d + movl %r12d,60(%rsp) + + xorl %r14d,%r13d + xorl %r11d,%r15d + addl %eax,%r12d + + movl %ebx,%eax + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ebx,%r13d + movl %ebx,%r14d + + rorl $2,%eax + rorl $13,%r13d + movl %ebx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%eax + rorl $9,%r13d + orl %edx,%r14d + + xorl %r13d,%eax + andl %edx,%r15d + addl %r12d,%r8d + + andl %ecx,%r14d + addl %r12d,%eax + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%eax + jmp .Lrounds_16_xx +.align 16 +.Lrounds_16_xx: + movl 4(%rsp),%r13d + movl 56(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 36(%rsp),%r12d + + addl 0(%rsp),%r12d + movl %r8d,%r13d + movl %r8d,%r14d + movl %r9d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r10d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r8d,%r15d + movl %r12d,0(%rsp) + + xorl %r14d,%r13d + xorl %r10d,%r15d + addl %r11d,%r12d + + movl %eax,%r11d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %eax,%r13d + movl %eax,%r14d + + rorl $2,%r11d + rorl $13,%r13d + movl %eax,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r11d + rorl $9,%r13d + orl %ecx,%r14d + + xorl %r13d,%r11d + andl %ecx,%r15d + addl %r12d,%edx + + andl %ebx,%r14d + addl %r12d,%r11d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r11d + movl 8(%rsp),%r13d + movl 60(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 40(%rsp),%r12d + + addl 4(%rsp),%r12d + movl %edx,%r13d + movl %edx,%r14d + movl %r8d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r9d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %edx,%r15d + movl %r12d,4(%rsp) + + xorl 
%r14d,%r13d + xorl %r9d,%r15d + addl %r10d,%r12d + + movl %r11d,%r10d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r11d,%r13d + movl %r11d,%r14d + + rorl $2,%r10d + rorl $13,%r13d + movl %r11d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r10d + rorl $9,%r13d + orl %ebx,%r14d + + xorl %r13d,%r10d + andl %ebx,%r15d + addl %r12d,%ecx + + andl %eax,%r14d + addl %r12d,%r10d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r10d + movl 12(%rsp),%r13d + movl 0(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 44(%rsp),%r12d + + addl 8(%rsp),%r12d + movl %ecx,%r13d + movl %ecx,%r14d + movl %edx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r8d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ecx,%r15d + movl %r12d,8(%rsp) + + xorl %r14d,%r13d + xorl %r8d,%r15d + addl %r9d,%r12d + + movl %r10d,%r9d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r10d,%r13d + movl %r10d,%r14d + + rorl $2,%r9d + rorl $13,%r13d + movl %r10d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r9d + rorl $9,%r13d + orl %eax,%r14d + + xorl %r13d,%r9d + andl %eax,%r15d + addl %r12d,%ebx + + andl %r11d,%r14d + addl %r12d,%r9d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r9d + movl 16(%rsp),%r13d + movl 4(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 48(%rsp),%r12d + + addl 12(%rsp),%r12d + movl %ebx,%r13d + movl %ebx,%r14d + movl %ecx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %edx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ebx,%r15d + movl %r12d,12(%rsp) + + xorl %r14d,%r13d + xorl %edx,%r15d + addl %r8d,%r12d + + movl %r9d,%r8d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r9d,%r13d + movl %r9d,%r14d + + rorl $2,%r8d + rorl $13,%r13d + movl %r9d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r8d + rorl $9,%r13d + orl %r11d,%r14d + + xorl %r13d,%r8d + andl %r11d,%r15d + addl %r12d,%eax + + andl %r10d,%r14d + addl %r12d,%r8d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r8d + movl 20(%rsp),%r13d + movl 8(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 52(%rsp),%r12d + + addl 16(%rsp),%r12d + movl %eax,%r13d + movl %eax,%r14d + movl %ebx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ecx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %eax,%r15d + movl %r12d,16(%rsp) + + xorl %r14d,%r13d + xorl %ecx,%r15d + addl %edx,%r12d + + movl %r8d,%edx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r8d,%r13d + movl %r8d,%r14d + + rorl $2,%edx + rorl $13,%r13d + movl %r8d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%edx + rorl $9,%r13d + orl %r10d,%r14d + + xorl %r13d,%edx + andl %r10d,%r15d + addl %r12d,%r11d + + andl %r9d,%r14d + addl %r12d,%edx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%edx + movl 24(%rsp),%r13d + movl 12(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + 
xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 56(%rsp),%r12d + + addl 20(%rsp),%r12d + movl %r11d,%r13d + movl %r11d,%r14d + movl %eax,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ebx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r11d,%r15d + movl %r12d,20(%rsp) + + xorl %r14d,%r13d + xorl %ebx,%r15d + addl %ecx,%r12d + + movl %edx,%ecx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %edx,%r13d + movl %edx,%r14d + + rorl $2,%ecx + rorl $13,%r13d + movl %edx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ecx + rorl $9,%r13d + orl %r9d,%r14d + + xorl %r13d,%ecx + andl %r9d,%r15d + addl %r12d,%r10d + + andl %r8d,%r14d + addl %r12d,%ecx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ecx + movl 28(%rsp),%r13d + movl 16(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 60(%rsp),%r12d + + addl 24(%rsp),%r12d + movl %r10d,%r13d + movl %r10d,%r14d + movl %r11d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %eax,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r10d,%r15d + movl %r12d,24(%rsp) + + xorl %r14d,%r13d + xorl %eax,%r15d + addl %ebx,%r12d + + movl %ecx,%ebx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ecx,%r13d + movl %ecx,%r14d + + rorl $2,%ebx + rorl $13,%r13d + movl %ecx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ebx + rorl $9,%r13d + orl %r8d,%r14d + + xorl %r13d,%ebx + andl %r8d,%r15d + addl %r12d,%r9d + + andl %edx,%r14d + addl %r12d,%ebx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ebx + movl 32(%rsp),%r13d + movl 20(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 0(%rsp),%r12d + + addl 28(%rsp),%r12d + movl %r9d,%r13d + movl %r9d,%r14d + movl %r10d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r11d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r9d,%r15d + movl %r12d,28(%rsp) + + xorl %r14d,%r13d + xorl %r11d,%r15d + addl %eax,%r12d + + movl %ebx,%eax + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ebx,%r13d + movl %ebx,%r14d + + rorl $2,%eax + rorl $13,%r13d + movl %ebx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%eax + rorl $9,%r13d + orl %edx,%r14d + + xorl %r13d,%eax + andl %edx,%r15d + addl %r12d,%r8d + + andl %ecx,%r14d + addl %r12d,%eax + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%eax + movl 36(%rsp),%r13d + movl 24(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 4(%rsp),%r12d + + addl 32(%rsp),%r12d + movl %r8d,%r13d + movl %r8d,%r14d + movl %r9d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r10d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r8d,%r15d + movl %r12d,32(%rsp) + + xorl %r14d,%r13d + xorl %r10d,%r15d + addl %r11d,%r12d + + movl %eax,%r11d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %eax,%r13d + movl %eax,%r14d + + rorl $2,%r11d + rorl $13,%r13d + movl %eax,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r11d + rorl $9,%r13d + orl %ecx,%r14d + + xorl %r13d,%r11d + andl %ecx,%r15d + addl %r12d,%edx + 
+ andl %ebx,%r14d + addl %r12d,%r11d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r11d + movl 40(%rsp),%r13d + movl 28(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 8(%rsp),%r12d + + addl 36(%rsp),%r12d + movl %edx,%r13d + movl %edx,%r14d + movl %r8d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r9d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %edx,%r15d + movl %r12d,36(%rsp) + + xorl %r14d,%r13d + xorl %r9d,%r15d + addl %r10d,%r12d + + movl %r11d,%r10d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r11d,%r13d + movl %r11d,%r14d + + rorl $2,%r10d + rorl $13,%r13d + movl %r11d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r10d + rorl $9,%r13d + orl %ebx,%r14d + + xorl %r13d,%r10d + andl %ebx,%r15d + addl %r12d,%ecx + + andl %eax,%r14d + addl %r12d,%r10d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r10d + movl 44(%rsp),%r13d + movl 32(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 12(%rsp),%r12d + + addl 40(%rsp),%r12d + movl %ecx,%r13d + movl %ecx,%r14d + movl %edx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r8d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ecx,%r15d + movl %r12d,40(%rsp) + + xorl %r14d,%r13d + xorl %r8d,%r15d + addl %r9d,%r12d + + movl %r10d,%r9d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r10d,%r13d + movl %r10d,%r14d + + rorl $2,%r9d + rorl $13,%r13d + movl %r10d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r9d + rorl $9,%r13d + orl %eax,%r14d + + xorl %r13d,%r9d + andl %eax,%r15d + addl %r12d,%ebx + + andl %r11d,%r14d + addl %r12d,%r9d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r9d + movl 48(%rsp),%r13d + movl 36(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 16(%rsp),%r12d + + addl 44(%rsp),%r12d + movl %ebx,%r13d + movl %ebx,%r14d + movl %ecx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %edx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %ebx,%r15d + movl %r12d,44(%rsp) + + xorl %r14d,%r13d + xorl %edx,%r15d + addl %r8d,%r12d + + movl %r9d,%r8d + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r9d,%r13d + movl %r9d,%r14d + + rorl $2,%r8d + rorl $13,%r13d + movl %r9d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%r8d + rorl $9,%r13d + orl %r11d,%r14d + + xorl %r13d,%r8d + andl %r11d,%r15d + addl %r12d,%eax + + andl %r10d,%r14d + addl %r12d,%r8d + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%r8d + movl 52(%rsp),%r13d + movl 40(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 20(%rsp),%r12d + + addl 48(%rsp),%r12d + movl %eax,%r13d + movl %eax,%r14d + movl %ebx,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ecx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %eax,%r15d + movl %r12d,48(%rsp) + + xorl %r14d,%r13d + 
xorl %ecx,%r15d + addl %edx,%r12d + + movl %r8d,%edx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %r8d,%r13d + movl %r8d,%r14d + + rorl $2,%edx + rorl $13,%r13d + movl %r8d,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%edx + rorl $9,%r13d + orl %r10d,%r14d + + xorl %r13d,%edx + andl %r10d,%r15d + addl %r12d,%r11d + + andl %r9d,%r14d + addl %r12d,%edx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%edx + movl 56(%rsp),%r13d + movl 44(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 24(%rsp),%r12d + + addl 52(%rsp),%r12d + movl %r11d,%r13d + movl %r11d,%r14d + movl %eax,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %ebx,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r11d,%r15d + movl %r12d,52(%rsp) + + xorl %r14d,%r13d + xorl %ebx,%r15d + addl %ecx,%r12d + + movl %edx,%ecx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %edx,%r13d + movl %edx,%r14d + + rorl $2,%ecx + rorl $13,%r13d + movl %edx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ecx + rorl $9,%r13d + orl %r9d,%r14d + + xorl %r13d,%ecx + andl %r9d,%r15d + addl %r12d,%r10d + + andl %r8d,%r14d + addl %r12d,%ecx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ecx + movl 60(%rsp),%r13d + movl 48(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 28(%rsp),%r12d + + addl 56(%rsp),%r12d + movl %r10d,%r13d + movl %r10d,%r14d + movl %r11d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %eax,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r10d,%r15d + movl %r12d,56(%rsp) + + xorl %r14d,%r13d + xorl %eax,%r15d + addl %ebx,%r12d + + movl %ecx,%ebx + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ecx,%r13d + movl %ecx,%r14d + + rorl $2,%ebx + rorl $13,%r13d + movl %ecx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%ebx + rorl $9,%r13d + orl %r8d,%r14d + + xorl %r13d,%ebx + andl %r8d,%r15d + addl %r12d,%r9d + + andl %edx,%r14d + addl %r12d,%ebx + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%ebx + movl 0(%rsp),%r13d + movl 52(%rsp),%r12d + + movl %r13d,%r15d + + shrl $3,%r13d + rorl $7,%r15d + + xorl %r15d,%r13d + rorl $11,%r15d + + xorl %r15d,%r13d + movl %r12d,%r14d + + shrl $10,%r12d + rorl $17,%r14d + + xorl %r14d,%r12d + rorl $2,%r14d + + xorl %r14d,%r12d + + addl %r13d,%r12d + + addl 32(%rsp),%r12d + + addl 60(%rsp),%r12d + movl %r9d,%r13d + movl %r9d,%r14d + movl %r10d,%r15d + + rorl $6,%r13d + rorl $11,%r14d + xorl %r11d,%r15d + + xorl %r14d,%r13d + rorl $14,%r14d + andl %r9d,%r15d + movl %r12d,60(%rsp) + + xorl %r14d,%r13d + xorl %r11d,%r15d + addl %eax,%r12d + + movl %ebx,%eax + addl %r13d,%r12d + + addl %r15d,%r12d + movl %ebx,%r13d + movl %ebx,%r14d + + rorl $2,%eax + rorl $13,%r13d + movl %ebx,%r15d + addl (%rbp,%rdi,4),%r12d + + xorl %r13d,%eax + rorl $9,%r13d + orl %edx,%r14d + + xorl %r13d,%eax + andl %edx,%r15d + addl %r12d,%r8d + + andl %ecx,%r14d + addl %r12d,%eax + + orl %r15d,%r14d + leaq 1(%rdi),%rdi + + addl %r14d,%eax + cmpq $64,%rdi + jb .Lrounds_16_xx + + movq 64+0(%rsp),%rdi + leaq 64(%rsi),%rsi + + addl 0(%rdi),%eax + addl 4(%rdi),%ebx + addl 8(%rdi),%ecx + addl 12(%rdi),%edx + addl 16(%rdi),%r8d + addl 20(%rdi),%r9d + addl 24(%rdi),%r10d + addl 
28(%rdi),%r11d + + cmpq 64+16(%rsp),%rsi + + movl %eax,0(%rdi) + movl %ebx,4(%rdi) + movl %ecx,8(%rdi) + movl %edx,12(%rdi) + movl %r8d,16(%rdi) + movl %r9d,20(%rdi) + movl %r10d,24(%rdi) + movl %r11d,28(%rdi) + jb .Lloop + + movq 64+24(%rsp),%rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + + .byte 0xf3,0xc3 +.size sha256_block_data_order,.-sha256_block_data_order +.align 64 +.type K256,@object +K256: +.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 +.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 +.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 +.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 +.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc +.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da +.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 +.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 +.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 +.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 +.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 +.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 +.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 +.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 +.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 +.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 Property changes on: secure/lib/libcrypto/amd64/sha256-x86_64.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/sha1-x86_64.S =================================================================== --- secure/lib/libcrypto/amd64/sha1-x86_64.S (revision 0) +++ secure/lib/libcrypto/amd64/sha1-x86_64.S (revision 0) @@ -0,0 +1,1279 @@ + # $FreeBSD$ +.text +.globl sha1_block_data_order +.type sha1_block_data_order,@function +.align 16 +sha1_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + movq %rsp,%rax + movq %rdi,%r8 + subq $72,%rsp + movq %rsi,%r9 + andq $-64,%rsp + movq %rdx,%r10 + movq %rax,64(%rsp) + + movl 0(%r8),%edx + movl 4(%r8),%esi + movl 8(%r8),%edi + movl 12(%r8),%ebp + movl 16(%r8),%r11d +.align 4 +.Lloop: + movl 0(%r9),%eax + bswapl %eax + movl %eax,0(%rsp) + leal 1518500249(%rax,%r11,1),%r12d + movl %edi,%ebx + movl 4(%r9),%eax + movl %edx,%r11d + xorl %ebp,%ebx + bswapl %eax + roll $5,%r11d + andl %esi,%ebx + movl %eax,4(%rsp) + addl %r11d,%r12d + xorl %ebp,%ebx + roll $30,%esi + addl %ebx,%r12d + leal 1518500249(%rax,%rbp,1),%r11d + movl %esi,%ebx + movl 8(%r9),%eax + movl %r12d,%ebp + xorl %edi,%ebx + bswapl %eax + roll $5,%ebp + andl %edx,%ebx + movl %eax,8(%rsp) + addl %ebp,%r11d + xorl %edi,%ebx + roll $30,%edx + addl %ebx,%r11d + leal 1518500249(%rax,%rdi,1),%ebp + movl %edx,%ebx + movl 12(%r9),%eax + movl %r11d,%edi + xorl %esi,%ebx + bswapl %eax + roll $5,%edi + andl %r12d,%ebx + movl %eax,12(%rsp) + addl %edi,%ebp + xorl %esi,%ebx + roll $30,%r12d + addl %ebx,%ebp + leal 1518500249(%rax,%rsi,1),%edi + movl %r12d,%ebx + movl 16(%r9),%eax + movl %ebp,%esi + xorl %edx,%ebx + bswapl %eax + roll $5,%esi + andl %r11d,%ebx + movl %eax,16(%rsp) + addl %esi,%edi + xorl %edx,%ebx + roll $30,%r11d + addl %ebx,%edi + leal 1518500249(%rax,%rdx,1),%esi + movl %r11d,%ebx + movl 20(%r9),%eax + movl %edi,%edx + xorl %r12d,%ebx + bswapl %eax + roll $5,%edx + andl %ebp,%ebx + movl %eax,20(%rsp) + addl %edx,%esi + xorl %r12d,%ebx + roll $30,%ebp + addl %ebx,%esi + leal 1518500249(%rax,%r12,1),%edx + movl %ebp,%ebx + movl 24(%r9),%eax + movl %esi,%r12d + xorl %r11d,%ebx + bswapl %eax + roll $5,%r12d + andl 
%edi,%ebx + movl %eax,24(%rsp) + addl %r12d,%edx + xorl %r11d,%ebx + roll $30,%edi + addl %ebx,%edx + leal 1518500249(%rax,%r11,1),%r12d + movl %edi,%ebx + movl 28(%r9),%eax + movl %edx,%r11d + xorl %ebp,%ebx + bswapl %eax + roll $5,%r11d + andl %esi,%ebx + movl %eax,28(%rsp) + addl %r11d,%r12d + xorl %ebp,%ebx + roll $30,%esi + addl %ebx,%r12d + leal 1518500249(%rax,%rbp,1),%r11d + movl %esi,%ebx + movl 32(%r9),%eax + movl %r12d,%ebp + xorl %edi,%ebx + bswapl %eax + roll $5,%ebp + andl %edx,%ebx + movl %eax,32(%rsp) + addl %ebp,%r11d + xorl %edi,%ebx + roll $30,%edx + addl %ebx,%r11d + leal 1518500249(%rax,%rdi,1),%ebp + movl %edx,%ebx + movl 36(%r9),%eax + movl %r11d,%edi + xorl %esi,%ebx + bswapl %eax + roll $5,%edi + andl %r12d,%ebx + movl %eax,36(%rsp) + addl %edi,%ebp + xorl %esi,%ebx + roll $30,%r12d + addl %ebx,%ebp + leal 1518500249(%rax,%rsi,1),%edi + movl %r12d,%ebx + movl 40(%r9),%eax + movl %ebp,%esi + xorl %edx,%ebx + bswapl %eax + roll $5,%esi + andl %r11d,%ebx + movl %eax,40(%rsp) + addl %esi,%edi + xorl %edx,%ebx + roll $30,%r11d + addl %ebx,%edi + leal 1518500249(%rax,%rdx,1),%esi + movl %r11d,%ebx + movl 44(%r9),%eax + movl %edi,%edx + xorl %r12d,%ebx + bswapl %eax + roll $5,%edx + andl %ebp,%ebx + movl %eax,44(%rsp) + addl %edx,%esi + xorl %r12d,%ebx + roll $30,%ebp + addl %ebx,%esi + leal 1518500249(%rax,%r12,1),%edx + movl %ebp,%ebx + movl 48(%r9),%eax + movl %esi,%r12d + xorl %r11d,%ebx + bswapl %eax + roll $5,%r12d + andl %edi,%ebx + movl %eax,48(%rsp) + addl %r12d,%edx + xorl %r11d,%ebx + roll $30,%edi + addl %ebx,%edx + leal 1518500249(%rax,%r11,1),%r12d + movl %edi,%ebx + movl 52(%r9),%eax + movl %edx,%r11d + xorl %ebp,%ebx + bswapl %eax + roll $5,%r11d + andl %esi,%ebx + movl %eax,52(%rsp) + addl %r11d,%r12d + xorl %ebp,%ebx + roll $30,%esi + addl %ebx,%r12d + leal 1518500249(%rax,%rbp,1),%r11d + movl %esi,%ebx + movl 56(%r9),%eax + movl %r12d,%ebp + xorl %edi,%ebx + bswapl %eax + roll $5,%ebp + andl %edx,%ebx + movl %eax,56(%rsp) + addl %ebp,%r11d + xorl %edi,%ebx + roll $30,%edx + addl %ebx,%r11d + leal 1518500249(%rax,%rdi,1),%ebp + movl %edx,%ebx + movl 60(%r9),%eax + movl %r11d,%edi + xorl %esi,%ebx + bswapl %eax + roll $5,%edi + andl %r12d,%ebx + movl %eax,60(%rsp) + addl %edi,%ebp + xorl %esi,%ebx + roll $30,%r12d + addl %ebx,%ebp + leal 1518500249(%rax,%rsi,1),%edi + movl 0(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 8(%rsp),%eax + xorl %edx,%ebx + roll $5,%esi + xorl 32(%rsp),%eax + andl %r11d,%ebx + addl %esi,%edi + xorl 52(%rsp),%eax + xorl %edx,%ebx + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,0(%rsp) + leal 1518500249(%rax,%rdx,1),%esi + movl 4(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + xorl 12(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edx + xorl 36(%rsp),%eax + andl %ebp,%ebx + addl %edx,%esi + xorl 56(%rsp),%eax + xorl %r12d,%ebx + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + movl %eax,4(%rsp) + leal 1518500249(%rax,%r12,1),%edx + movl 8(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 16(%rsp),%eax + xorl %r11d,%ebx + roll $5,%r12d + xorl 40(%rsp),%eax + andl %edi,%ebx + addl %r12d,%edx + xorl 60(%rsp),%eax + xorl %r11d,%ebx + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + movl %eax,8(%rsp) + leal 1518500249(%rax,%r11,1),%r12d + movl 12(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 20(%rsp),%eax + xorl %ebp,%ebx + roll $5,%r11d + xorl 44(%rsp),%eax + andl %esi,%ebx + addl %r11d,%r12d + xorl 0(%rsp),%eax + xorl %ebp,%ebx + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,12(%rsp) + leal 
1518500249(%rax,%rbp,1),%r11d + movl 16(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 24(%rsp),%eax + xorl %edi,%ebx + roll $5,%ebp + xorl 48(%rsp),%eax + andl %edx,%ebx + addl %ebp,%r11d + xorl 4(%rsp),%eax + xorl %edi,%ebx + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,16(%rsp) + leal 1859775393(%rax,%rdi,1),%ebp + movl 20(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 28(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 52(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 8(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,20(%rsp) + leal 1859775393(%rax,%rsi,1),%edi + movl 24(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 32(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 56(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 12(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,24(%rsp) + leal 1859775393(%rax,%rdx,1),%esi + movl 28(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + xorl 36(%rsp),%eax + xorl %ebp,%ebx + roll $5,%edx + xorl 60(%rsp),%eax + xorl %r12d,%ebx + addl %edx,%esi + xorl 16(%rsp),%eax + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + movl %eax,28(%rsp) + leal 1859775393(%rax,%r12,1),%edx + movl 32(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 40(%rsp),%eax + xorl %edi,%ebx + roll $5,%r12d + xorl 0(%rsp),%eax + xorl %r11d,%ebx + addl %r12d,%edx + xorl 20(%rsp),%eax + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + movl %eax,32(%rsp) + leal 1859775393(%rax,%r11,1),%r12d + movl 36(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 44(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 4(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 24(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,36(%rsp) + leal 1859775393(%rax,%rbp,1),%r11d + movl 40(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 48(%rsp),%eax + xorl %edx,%ebx + roll $5,%ebp + xorl 8(%rsp),%eax + xorl %edi,%ebx + addl %ebp,%r11d + xorl 28(%rsp),%eax + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,40(%rsp) + leal 1859775393(%rax,%rdi,1),%ebp + movl 44(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 52(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 12(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 32(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,44(%rsp) + leal 1859775393(%rax,%rsi,1),%edi + movl 48(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 56(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 16(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 36(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,48(%rsp) + leal 1859775393(%rax,%rdx,1),%esi + movl 52(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + xorl 60(%rsp),%eax + xorl %ebp,%ebx + roll $5,%edx + xorl 20(%rsp),%eax + xorl %r12d,%ebx + addl %edx,%esi + xorl 40(%rsp),%eax + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + movl %eax,52(%rsp) + leal 1859775393(%rax,%r12,1),%edx + movl 56(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 0(%rsp),%eax + xorl %edi,%ebx + roll $5,%r12d + xorl 24(%rsp),%eax + xorl %r11d,%ebx + addl %r12d,%edx + xorl 44(%rsp),%eax + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + movl %eax,56(%rsp) + leal 1859775393(%rax,%r11,1),%r12d + movl 60(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 4(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 28(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 48(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,60(%rsp) + leal 1859775393(%rax,%rbp,1),%r11d + 
movl 0(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 8(%rsp),%eax + xorl %edx,%ebx + roll $5,%ebp + xorl 32(%rsp),%eax + xorl %edi,%ebx + addl %ebp,%r11d + xorl 52(%rsp),%eax + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,0(%rsp) + leal 1859775393(%rax,%rdi,1),%ebp + movl 4(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 12(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 36(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 56(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,4(%rsp) + leal 1859775393(%rax,%rsi,1),%edi + movl 8(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 16(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 40(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 60(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,8(%rsp) + leal 1859775393(%rax,%rdx,1),%esi + movl 12(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + xorl 20(%rsp),%eax + xorl %ebp,%ebx + roll $5,%edx + xorl 44(%rsp),%eax + xorl %r12d,%ebx + addl %edx,%esi + xorl 0(%rsp),%eax + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + movl %eax,12(%rsp) + leal 1859775393(%rax,%r12,1),%edx + movl 16(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 24(%rsp),%eax + xorl %edi,%ebx + roll $5,%r12d + xorl 48(%rsp),%eax + xorl %r11d,%ebx + addl %r12d,%edx + xorl 4(%rsp),%eax + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + movl %eax,16(%rsp) + leal 1859775393(%rax,%r11,1),%r12d + movl 20(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 28(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 52(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 8(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,20(%rsp) + leal 1859775393(%rax,%rbp,1),%r11d + movl 24(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 32(%rsp),%eax + xorl %edx,%ebx + roll $5,%ebp + xorl 56(%rsp),%eax + xorl %edi,%ebx + addl %ebp,%r11d + xorl 12(%rsp),%eax + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,24(%rsp) + leal 1859775393(%rax,%rdi,1),%ebp + movl 28(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 36(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 60(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 16(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,28(%rsp) + leal 1859775393(%rax,%rsi,1),%edi + movl 32(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 40(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 0(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 20(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,32(%rsp) + leal -1894007588(%rax,%rdx,1),%esi + movl 36(%rsp),%eax + movl %ebp,%ebx + movl %ebp,%ecx + xorl 44(%rsp),%eax + movl %edi,%edx + andl %r11d,%ebx + xorl 4(%rsp),%eax + orl %r11d,%ecx + roll $5,%edx + xorl 24(%rsp),%eax + andl %r12d,%ecx + addl %edx,%esi + roll $1,%eax + orl %ecx,%ebx + roll $30,%ebp + movl %eax,36(%rsp) + addl %ebx,%esi + leal -1894007588(%rax,%r12,1),%edx + movl 40(%rsp),%eax + movl %edi,%ebx + movl %edi,%ecx + xorl 48(%rsp),%eax + movl %esi,%r12d + andl %ebp,%ebx + xorl 8(%rsp),%eax + orl %ebp,%ecx + roll $5,%r12d + xorl 28(%rsp),%eax + andl %r11d,%ecx + addl %r12d,%edx + roll $1,%eax + orl %ecx,%ebx + roll $30,%edi + movl %eax,40(%rsp) + addl %ebx,%edx + leal -1894007588(%rax,%r11,1),%r12d + movl 44(%rsp),%eax + movl %esi,%ebx + movl %esi,%ecx + xorl 52(%rsp),%eax + movl %edx,%r11d + andl %edi,%ebx + xorl 12(%rsp),%eax + orl %edi,%ecx + roll $5,%r11d + xorl 32(%rsp),%eax + andl %ebp,%ecx + addl %r11d,%r12d + roll $1,%eax + orl 
%ecx,%ebx + roll $30,%esi + movl %eax,44(%rsp) + addl %ebx,%r12d + leal -1894007588(%rax,%rbp,1),%r11d + movl 48(%rsp),%eax + movl %edx,%ebx + movl %edx,%ecx + xorl 56(%rsp),%eax + movl %r12d,%ebp + andl %esi,%ebx + xorl 16(%rsp),%eax + orl %esi,%ecx + roll $5,%ebp + xorl 36(%rsp),%eax + andl %edi,%ecx + addl %ebp,%r11d + roll $1,%eax + orl %ecx,%ebx + roll $30,%edx + movl %eax,48(%rsp) + addl %ebx,%r11d + leal -1894007588(%rax,%rdi,1),%ebp + movl 52(%rsp),%eax + movl %r12d,%ebx + movl %r12d,%ecx + xorl 60(%rsp),%eax + movl %r11d,%edi + andl %edx,%ebx + xorl 20(%rsp),%eax + orl %edx,%ecx + roll $5,%edi + xorl 40(%rsp),%eax + andl %esi,%ecx + addl %edi,%ebp + roll $1,%eax + orl %ecx,%ebx + roll $30,%r12d + movl %eax,52(%rsp) + addl %ebx,%ebp + leal -1894007588(%rax,%rsi,1),%edi + movl 56(%rsp),%eax + movl %r11d,%ebx + movl %r11d,%ecx + xorl 0(%rsp),%eax + movl %ebp,%esi + andl %r12d,%ebx + xorl 24(%rsp),%eax + orl %r12d,%ecx + roll $5,%esi + xorl 44(%rsp),%eax + andl %edx,%ecx + addl %esi,%edi + roll $1,%eax + orl %ecx,%ebx + roll $30,%r11d + movl %eax,56(%rsp) + addl %ebx,%edi + leal -1894007588(%rax,%rdx,1),%esi + movl 60(%rsp),%eax + movl %ebp,%ebx + movl %ebp,%ecx + xorl 4(%rsp),%eax + movl %edi,%edx + andl %r11d,%ebx + xorl 28(%rsp),%eax + orl %r11d,%ecx + roll $5,%edx + xorl 48(%rsp),%eax + andl %r12d,%ecx + addl %edx,%esi + roll $1,%eax + orl %ecx,%ebx + roll $30,%ebp + movl %eax,60(%rsp) + addl %ebx,%esi + leal -1894007588(%rax,%r12,1),%edx + movl 0(%rsp),%eax + movl %edi,%ebx + movl %edi,%ecx + xorl 8(%rsp),%eax + movl %esi,%r12d + andl %ebp,%ebx + xorl 32(%rsp),%eax + orl %ebp,%ecx + roll $5,%r12d + xorl 52(%rsp),%eax + andl %r11d,%ecx + addl %r12d,%edx + roll $1,%eax + orl %ecx,%ebx + roll $30,%edi + movl %eax,0(%rsp) + addl %ebx,%edx + leal -1894007588(%rax,%r11,1),%r12d + movl 4(%rsp),%eax + movl %esi,%ebx + movl %esi,%ecx + xorl 12(%rsp),%eax + movl %edx,%r11d + andl %edi,%ebx + xorl 36(%rsp),%eax + orl %edi,%ecx + roll $5,%r11d + xorl 56(%rsp),%eax + andl %ebp,%ecx + addl %r11d,%r12d + roll $1,%eax + orl %ecx,%ebx + roll $30,%esi + movl %eax,4(%rsp) + addl %ebx,%r12d + leal -1894007588(%rax,%rbp,1),%r11d + movl 8(%rsp),%eax + movl %edx,%ebx + movl %edx,%ecx + xorl 16(%rsp),%eax + movl %r12d,%ebp + andl %esi,%ebx + xorl 40(%rsp),%eax + orl %esi,%ecx + roll $5,%ebp + xorl 60(%rsp),%eax + andl %edi,%ecx + addl %ebp,%r11d + roll $1,%eax + orl %ecx,%ebx + roll $30,%edx + movl %eax,8(%rsp) + addl %ebx,%r11d + leal -1894007588(%rax,%rdi,1),%ebp + movl 12(%rsp),%eax + movl %r12d,%ebx + movl %r12d,%ecx + xorl 20(%rsp),%eax + movl %r11d,%edi + andl %edx,%ebx + xorl 44(%rsp),%eax + orl %edx,%ecx + roll $5,%edi + xorl 0(%rsp),%eax + andl %esi,%ecx + addl %edi,%ebp + roll $1,%eax + orl %ecx,%ebx + roll $30,%r12d + movl %eax,12(%rsp) + addl %ebx,%ebp + leal -1894007588(%rax,%rsi,1),%edi + movl 16(%rsp),%eax + movl %r11d,%ebx + movl %r11d,%ecx + xorl 24(%rsp),%eax + movl %ebp,%esi + andl %r12d,%ebx + xorl 48(%rsp),%eax + orl %r12d,%ecx + roll $5,%esi + xorl 4(%rsp),%eax + andl %edx,%ecx + addl %esi,%edi + roll $1,%eax + orl %ecx,%ebx + roll $30,%r11d + movl %eax,16(%rsp) + addl %ebx,%edi + leal -1894007588(%rax,%rdx,1),%esi + movl 20(%rsp),%eax + movl %ebp,%ebx + movl %ebp,%ecx + xorl 28(%rsp),%eax + movl %edi,%edx + andl %r11d,%ebx + xorl 52(%rsp),%eax + orl %r11d,%ecx + roll $5,%edx + xorl 8(%rsp),%eax + andl %r12d,%ecx + addl %edx,%esi + roll $1,%eax + orl %ecx,%ebx + roll $30,%ebp + movl %eax,20(%rsp) + addl %ebx,%esi + leal -1894007588(%rax,%r12,1),%edx + movl 24(%rsp),%eax + movl 
%edi,%ebx + movl %edi,%ecx + xorl 32(%rsp),%eax + movl %esi,%r12d + andl %ebp,%ebx + xorl 56(%rsp),%eax + orl %ebp,%ecx + roll $5,%r12d + xorl 12(%rsp),%eax + andl %r11d,%ecx + addl %r12d,%edx + roll $1,%eax + orl %ecx,%ebx + roll $30,%edi + movl %eax,24(%rsp) + addl %ebx,%edx + leal -1894007588(%rax,%r11,1),%r12d + movl 28(%rsp),%eax + movl %esi,%ebx + movl %esi,%ecx + xorl 36(%rsp),%eax + movl %edx,%r11d + andl %edi,%ebx + xorl 60(%rsp),%eax + orl %edi,%ecx + roll $5,%r11d + xorl 16(%rsp),%eax + andl %ebp,%ecx + addl %r11d,%r12d + roll $1,%eax + orl %ecx,%ebx + roll $30,%esi + movl %eax,28(%rsp) + addl %ebx,%r12d + leal -1894007588(%rax,%rbp,1),%r11d + movl 32(%rsp),%eax + movl %edx,%ebx + movl %edx,%ecx + xorl 40(%rsp),%eax + movl %r12d,%ebp + andl %esi,%ebx + xorl 0(%rsp),%eax + orl %esi,%ecx + roll $5,%ebp + xorl 20(%rsp),%eax + andl %edi,%ecx + addl %ebp,%r11d + roll $1,%eax + orl %ecx,%ebx + roll $30,%edx + movl %eax,32(%rsp) + addl %ebx,%r11d + leal -1894007588(%rax,%rdi,1),%ebp + movl 36(%rsp),%eax + movl %r12d,%ebx + movl %r12d,%ecx + xorl 44(%rsp),%eax + movl %r11d,%edi + andl %edx,%ebx + xorl 4(%rsp),%eax + orl %edx,%ecx + roll $5,%edi + xorl 24(%rsp),%eax + andl %esi,%ecx + addl %edi,%ebp + roll $1,%eax + orl %ecx,%ebx + roll $30,%r12d + movl %eax,36(%rsp) + addl %ebx,%ebp + leal -1894007588(%rax,%rsi,1),%edi + movl 40(%rsp),%eax + movl %r11d,%ebx + movl %r11d,%ecx + xorl 48(%rsp),%eax + movl %ebp,%esi + andl %r12d,%ebx + xorl 8(%rsp),%eax + orl %r12d,%ecx + roll $5,%esi + xorl 28(%rsp),%eax + andl %edx,%ecx + addl %esi,%edi + roll $1,%eax + orl %ecx,%ebx + roll $30,%r11d + movl %eax,40(%rsp) + addl %ebx,%edi + leal -1894007588(%rax,%rdx,1),%esi + movl 44(%rsp),%eax + movl %ebp,%ebx + movl %ebp,%ecx + xorl 52(%rsp),%eax + movl %edi,%edx + andl %r11d,%ebx + xorl 12(%rsp),%eax + orl %r11d,%ecx + roll $5,%edx + xorl 32(%rsp),%eax + andl %r12d,%ecx + addl %edx,%esi + roll $1,%eax + orl %ecx,%ebx + roll $30,%ebp + movl %eax,44(%rsp) + addl %ebx,%esi + leal -1894007588(%rax,%r12,1),%edx + movl 48(%rsp),%eax + movl %edi,%ebx + movl %edi,%ecx + xorl 56(%rsp),%eax + movl %esi,%r12d + andl %ebp,%ebx + xorl 16(%rsp),%eax + orl %ebp,%ecx + roll $5,%r12d + xorl 36(%rsp),%eax + andl %r11d,%ecx + addl %r12d,%edx + roll $1,%eax + orl %ecx,%ebx + roll $30,%edi + movl %eax,48(%rsp) + addl %ebx,%edx + leal -899497514(%rax,%r11,1),%r12d + movl 52(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 60(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 20(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 40(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,52(%rsp) + leal -899497514(%rax,%rbp,1),%r11d + movl 56(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 0(%rsp),%eax + xorl %edx,%ebx + roll $5,%ebp + xorl 24(%rsp),%eax + xorl %edi,%ebx + addl %ebp,%r11d + xorl 44(%rsp),%eax + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,56(%rsp) + leal -899497514(%rax,%rdi,1),%ebp + movl 60(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 4(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 28(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 48(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,60(%rsp) + leal -899497514(%rax,%rsi,1),%edi + movl 0(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 8(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 32(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 52(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,0(%rsp) + leal -899497514(%rax,%rdx,1),%esi + movl 
4(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + xorl 12(%rsp),%eax + xorl %ebp,%ebx + roll $5,%edx + xorl 36(%rsp),%eax + xorl %r12d,%ebx + addl %edx,%esi + xorl 56(%rsp),%eax + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + movl %eax,4(%rsp) + leal -899497514(%rax,%r12,1),%edx + movl 8(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 16(%rsp),%eax + xorl %edi,%ebx + roll $5,%r12d + xorl 40(%rsp),%eax + xorl %r11d,%ebx + addl %r12d,%edx + xorl 60(%rsp),%eax + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + movl %eax,8(%rsp) + leal -899497514(%rax,%r11,1),%r12d + movl 12(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 20(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 44(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 0(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,12(%rsp) + leal -899497514(%rax,%rbp,1),%r11d + movl 16(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 24(%rsp),%eax + xorl %edx,%ebx + roll $5,%ebp + xorl 48(%rsp),%eax + xorl %edi,%ebx + addl %ebp,%r11d + xorl 4(%rsp),%eax + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,16(%rsp) + leal -899497514(%rax,%rdi,1),%ebp + movl 20(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 28(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 52(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 8(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,20(%rsp) + leal -899497514(%rax,%rsi,1),%edi + movl 24(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 32(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 56(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 12(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,24(%rsp) + leal -899497514(%rax,%rdx,1),%esi + movl 28(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + xorl 36(%rsp),%eax + xorl %ebp,%ebx + roll $5,%edx + xorl 60(%rsp),%eax + xorl %r12d,%ebx + addl %edx,%esi + xorl 16(%rsp),%eax + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + movl %eax,28(%rsp) + leal -899497514(%rax,%r12,1),%edx + movl 32(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 40(%rsp),%eax + xorl %edi,%ebx + roll $5,%r12d + xorl 0(%rsp),%eax + xorl %r11d,%ebx + addl %r12d,%edx + xorl 20(%rsp),%eax + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + movl %eax,32(%rsp) + leal -899497514(%rax,%r11,1),%r12d + movl 36(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 44(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 4(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 24(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + movl %eax,36(%rsp) + leal -899497514(%rax,%rbp,1),%r11d + movl 40(%rsp),%eax + movl %esi,%ebx + movl %r12d,%ebp + xorl 48(%rsp),%eax + xorl %edx,%ebx + roll $5,%ebp + xorl 8(%rsp),%eax + xorl %edi,%ebx + addl %ebp,%r11d + xorl 28(%rsp),%eax + roll $30,%edx + addl %ebx,%r11d + roll $1,%eax + movl %eax,40(%rsp) + leal -899497514(%rax,%rdi,1),%ebp + movl 44(%rsp),%eax + movl %edx,%ebx + movl %r11d,%edi + xorl 52(%rsp),%eax + xorl %r12d,%ebx + roll $5,%edi + xorl 12(%rsp),%eax + xorl %esi,%ebx + addl %edi,%ebp + xorl 32(%rsp),%eax + roll $30,%r12d + addl %ebx,%ebp + roll $1,%eax + movl %eax,44(%rsp) + leal -899497514(%rax,%rsi,1),%edi + movl 48(%rsp),%eax + movl %r12d,%ebx + movl %ebp,%esi + xorl 56(%rsp),%eax + xorl %r11d,%ebx + roll $5,%esi + xorl 16(%rsp),%eax + xorl %edx,%ebx + addl %esi,%edi + xorl 36(%rsp),%eax + roll $30,%r11d + addl %ebx,%edi + roll $1,%eax + movl %eax,48(%rsp) + leal -899497514(%rax,%rdx,1),%esi + movl 52(%rsp),%eax + movl %r11d,%ebx + movl %edi,%edx + 
xorl 60(%rsp),%eax + xorl %ebp,%ebx + roll $5,%edx + xorl 20(%rsp),%eax + xorl %r12d,%ebx + addl %edx,%esi + xorl 40(%rsp),%eax + roll $30,%ebp + addl %ebx,%esi + roll $1,%eax + leal -899497514(%rax,%r12,1),%edx + movl 56(%rsp),%eax + movl %ebp,%ebx + movl %esi,%r12d + xorl 0(%rsp),%eax + xorl %edi,%ebx + roll $5,%r12d + xorl 24(%rsp),%eax + xorl %r11d,%ebx + addl %r12d,%edx + xorl 44(%rsp),%eax + roll $30,%edi + addl %ebx,%edx + roll $1,%eax + leal -899497514(%rax,%r11,1),%r12d + movl 60(%rsp),%eax + movl %edi,%ebx + movl %edx,%r11d + xorl 4(%rsp),%eax + xorl %esi,%ebx + roll $5,%r11d + xorl 28(%rsp),%eax + xorl %ebp,%ebx + addl %r11d,%r12d + xorl 48(%rsp),%eax + roll $30,%esi + addl %ebx,%r12d + roll $1,%eax + leal -899497514(%rax,%rbp,1),%r11d + movl %esi,%ebx + movl %r12d,%ebp + xorl %edx,%ebx + roll $5,%ebp + xorl %edi,%ebx + addl %ebp,%r11d + roll $30,%edx + addl %ebx,%r11d + addl 0(%r8),%r11d + addl 4(%r8),%r12d + addl 8(%r8),%edx + addl 12(%r8),%esi + addl 16(%r8),%edi + movl %r11d,0(%r8) + movl %r12d,4(%r8) + movl %edx,8(%r8) + movl %esi,12(%r8) + movl %edi,16(%r8) + + xchgl %r11d,%edx + xchgl %r12d,%esi + xchgl %r11d,%edi + xchgl %r12d,%ebp + + leaq 64(%r9),%r9 + subq $1,%r10 + jnz .Lloop + movq 64(%rsp),%rsp + popq %r12 + popq %rbp + popq %rbx + .byte 0xf3,0xc3 +.size sha1_block_data_order,.-sha1_block_data_order +.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 Property changes on: secure/lib/libcrypto/amd64/sha1-x86_64.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/amd64/x86_64-mont.S =================================================================== --- secure/lib/libcrypto/amd64/x86_64-mont.S (revision 0) +++ secure/lib/libcrypto/amd64/x86_64-mont.S (revision 0) @@ -0,0 +1,168 @@ + # $FreeBSD$ +.text + +.globl bn_mul_mont +.type bn_mul_mont,@function +.align 16 +bn_mul_mont: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + + movl %r9d,%r9d + leaq 2(%r9),%rax + movq %rsp,%rbp + negq %rax + leaq (%rsp,%rax,8),%rsp + andq $-1024,%rsp + + movq %rbp,8(%rsp,%r9,8) + movq %rdx,%r12 + + movq (%r8),%r8 + + xorq %r14,%r14 + xorq %r15,%r15 + + movq (%r12),%rbx + movq (%rsi),%rax + mulq %rbx + movq %rax,%r10 + movq %rdx,%r11 + + imulq %r8,%rax + movq %rax,%rbp + + mulq (%rcx) + addq %r10,%rax + adcq $0,%rdx + movq %rdx,%r13 + + leaq 1(%r15),%r15 +.L1st: + movq (%rsi,%r15,8),%rax + mulq %rbx + addq %r11,%rax + adcq $0,%rdx + movq %rax,%r10 + movq (%rcx,%r15,8),%rax + movq %rdx,%r11 + + mulq %rbp + addq %r13,%rax + leaq 1(%r15),%r15 + adcq $0,%rdx + addq %r10,%rax + adcq $0,%rdx + movq %rax,-16(%rsp,%r15,8) + cmpq %r9,%r15 + movq %rdx,%r13 + jl .L1st + + xorq %rdx,%rdx + addq %r11,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r9,8) + movq %rdx,(%rsp,%r9,8) + + leaq 1(%r14),%r14 +.align 4 +.Louter: + xorq %r15,%r15 + + movq (%r12,%r14,8),%rbx + movq (%rsi),%rax + mulq %rbx + addq (%rsp),%rax + adcq $0,%rdx + movq %rax,%r10 + movq %rdx,%r11 + + imulq %r8,%rax + movq %rax,%rbp + + mulq (%rcx,%r15,8) + addq %r10,%rax + movq 8(%rsp),%r10 + adcq $0,%rdx + movq %rdx,%r13 + + leaq 1(%r15),%r15 +.align 4 +.Linner: + movq (%rsi,%r15,8),%rax + mulq %rbx + addq %r11,%rax + adcq $0,%rdx + addq %rax,%r10 + movq (%rcx,%r15,8),%rax + adcq $0,%rdx + 
movq %rdx,%r11 + + mulq %rbp + addq %r13,%rax + leaq 1(%r15),%r15 + adcq $0,%rdx + addq %r10,%rax + adcq $0,%rdx + movq (%rsp,%r15,8),%r10 + cmpq %r9,%r15 + movq %rax,-16(%rsp,%r15,8) + movq %rdx,%r13 + jl .Linner + + xorq %rdx,%rdx + addq %r11,%r13 + adcq $0,%rdx + addq %r10,%r13 + adcq $0,%rdx + movq %r13,-8(%rsp,%r9,8) + movq %rdx,(%rsp,%r9,8) + + leaq 1(%r14),%r14 + cmpq %r9,%r14 + jl .Louter + + leaq (%rsp),%rsi + leaq -1(%r9),%r15 + + movq (%rsi),%rax + xorq %r14,%r14 + jmp .Lsub +.align 16 +.Lsub: sbbq (%rcx,%r14,8),%rax + movq %rax,(%rdi,%r14,8) + decq %r15 + movq 8(%rsi,%r14,8),%rax + leaq 1(%r14),%r14 + jge .Lsub + + sbbq $0,%rax + andq %rax,%rsi + notq %rax + movq %rdi,%rcx + andq %rax,%rcx + leaq -1(%r9),%r15 + orq %rcx,%rsi +.align 16 +.Lcopy: + movq (%rsi,%r15,8),%rax + movq %rax,(%rdi,%r15,8) + movq %r14,(%rsp,%r15,8) + decq %r15 + jge .Lcopy + + movq 8(%rsp,%r9,8),%rsp + movq $1,%rax + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + .byte 0xf3,0xc3 +.size bn_mul_mont,.-bn_mul_mont +.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 Property changes on: secure/lib/libcrypto/amd64/x86_64-mont.S ___________________________________________________________________ Added: svn:mime-type + text/plain Added: svn:keywords + FreeBSD=%H Added: svn:eol-style + native Index: secure/lib/libcrypto/Makefile =================================================================== --- secure/lib/libcrypto/Makefile (revision 222101) +++ secure/lib/libcrypto/Makefile (working copy) @@ -20,15 +20,26 @@ MAN+= config.5 des_modes.7 .include "Makefile.inc" # base sources -SRCS= cpt_err.c cryptlib.c cversion.c ebcdic.c ex_data.c mem.c mem_clr.c \ - mem_dbg.c o_dir.c o_str.c o_time.c tmdiff.c uid.c dyn_lck.c \ - o_init.c fips_err.c +SRCS= cpt_err.c cryptlib.c cversion.c dyn_lck.c ebcdic.c ex_data.c fips_err.c \ + mem.c mem_clr.c mem_dbg.c o_dir.c o_init.c o_str.c o_time.c tmdiff.c \ + uid.c +.if ${MACHINE_CPUARCH} == "amd64" +SRCS+= x86_64cpuid.S +.elif ${MACHINE_CPUARCH} == "i386" +SRCS+= x86cpuid.s +.endif INCS= crypto.h ebcdic.h opensslv.h ossl_typ.h symhacks.h tmdiff.h \ ../e_os.h ../e_os2.h # aes -SRCS+= aes_cbc.c aes_cfb.c aes_core.c aes_ctr.c aes_ecb.c aes_ige.c \ - aes_misc.c aes_ofb.c aes_wrap.c +SRCS+= aes_cfb.c aes_ctr.c aes_ecb.c aes_ige.c aes_misc.c aes_ofb.c aes_wrap.c +.if ${MACHINE_CPUARCH} == "amd64" +SRCS+= aes-x86_64.S +.elif ${MACHINE_CPUARCH} == "i386" +SRCS+= aes-586.s +.else +SRCS+= aes_cbc.c aes_core.c +.endif INCS+= aes.h aes_locl.h # asn1 @@ -61,10 +72,10 @@ SRCS+= bf_enc.c INCS+= blowfish.h # bio -SRCS+= b_dump.c b_print.c b_sock.c bf_buff.c bf_lbuf.c bf_nbio.c \ - bf_null.c bio_cb.c bio_err.c bio_lib.c bss_acpt.c bss_bio.c \ - bss_conn.c bss_dgram.c bss_fd.c bss_file.c bss_log.c bss_mem.c \ - bss_null.c bss_sock.c +SRCS+= b_dump.c b_print.c b_sock.c bf_buff.c bf_nbio.c bf_null.c \ + bio_cb.c bio_err.c bio_lib.c bss_acpt.c bss_bio.c bss_conn.c \ + bss_dgram.c bss_fd.c bss_file.c bss_log.c bss_mem.c bss_null.c \ + bss_sock.c INCS+= bio.h bio_lcl.h # bn @@ -76,7 +87,7 @@ SRCS+= bn_add.c bn_blind.c bn_const.c bn_ctx.c bn_ .if ${MACHINE_CPUARCH} == "i386" SRCS+= bn-586.s co-586.s .elif ${MACHINE_CPUARCH} == "amd64" -SRCS+= x86_64-gcc.c +SRCS+= x86_64-gcc.c x86_64-mont.S .else SRCS+= bn_asm.c .endif @@ -208,7 +219,9 @@ INCS+= md4.h # md5 SRCS+=
md5_dgst.c md5_one.c -.if ${MACHINE_CPUARCH} == "i386" +.if ${MACHINE_CPUARCH} == "amd64" +SRCS+= md5-x86_64.S +.elif ${MACHINE_CPUARCH} == "i386" SRCS+= md5-586.s .endif INCS+= md5.h @@ -254,11 +267,13 @@ SRCS+= rc2_cbc.c rc2_ecb.c rc2_skey.c rc2cfb64.c r INCS+= rc2.h # rc4 -SRCS+= rc4_skey.c rc4_fblk.c -.if ${MACHINE_CPUARCH} == "i386" -SRCS+= rc4-586.s +SRCS+= rc4_fblk.c +.if ${MACHINE_CPUARCH} == "amd64" +SRCS+= rc4-x86_64.S +.elif ${MACHINE_CPUARCH} == "i386" +SRCS+= rc4-586.s rc4_skey.c .else -SRCS+= rc4_enc.c +SRCS+= rc4_enc.c rc4_skey.c .endif INCS+= rc4.h @@ -284,8 +299,10 @@ INCS+= rsa.h # sha SRCS+= sha1_one.c sha1dgst.c sha_dgst.c sha_one.c sha256.c sha512.c -.if ${MACHINE_CPUARCH} == "i386" -SRCS+= sha1-586.s +.if ${MACHINE_CPUARCH} == "amd64" +SRCS+= sha1-x86_64.S sha256-x86_64.S sha512-x86_64.S +.elif ${MACHINE_CPUARCH} == "i386" +SRCS+= sha1-586.s sha512-sse2.s .endif INCS+= sha.h @@ -348,6 +365,9 @@ CSTD= gnu89 .if !empty(SRCS:M*.s) AFLAGS+= --noexecstack .endif +.if !empty(SRCS:M*.S) +ACFLAGS+= -Wa,--noexecstack +.endif CLEANFILES= buildinf.h opensslconf.h evp.h @@ -368,12 +388,11 @@ evp.h: ${LCRYPTO_SRC}/crypto/evp/evp.h cp -f ${.ALLSRC} ${.TARGET} .endif -# No FIPS support for now -fips.h: - echo '/* dummy fips.h */' > ${.TARGET} +fips.h: ${LCRYPTO_SRC}/fips/fips.h + cp -f ${.ALLSRC} ${.TARGET} -fips_rand.h: - echo '/* dummy fips_rand.h */' > ${.TARGET} +fips_rand.h: ${LCRYPTO_SRC}/fips/rand/fips_rand.h + cp -f ${.ALLSRC} ${.TARGET} CLEANFILES+= fips.h fips_rand.h @@ -387,7 +406,9 @@ afterinstall: .include <bsd.lib.mk> -.if ${MACHINE_CPUARCH} == "i386" +.if ${MACHINE_CPUARCH} == "amd64" +.PATH: ${.CURDIR}/amd64 +.elif ${MACHINE_CPUARCH} == "i386" .PATH: ${.CURDIR}/i386 .endif
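The unrolled rounds in the new sha1-x86_64.S above are easier to read once the constants are recognized: 1518500249, 1859775393, -1894007588 and -899497514 are the four SHA-1 round constants (0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xCA62C1D6), and the slots 0(%rsp)..60(%rsp) hold the 16-word message schedule updated in place. The C fragment below is only an orientation sketch of one round under those assumptions (the assembly uses equivalent but differently factored boolean expressions, e.g. (b&c)|((b|c)&d) for Maj); it is not the source the assembly was generated from.

#include <stdint.h>

/* Orientation sketch: one SHA-1 round, t = 0..79.  w is the schedule word
 * for this round; for t >= 16 the 16-word circular buffer is updated as
 *   w[t%16] = rol32(w[(t+13)%16] ^ w[(t+8)%16] ^ w[(t+2)%16] ^ w[t%16], 1);
 * which corresponds to the xorl chain over the stack slots above. */
static inline uint32_t rol32(uint32_t x, int n) { return (x << n) | (x >> (32 - n)); }

static void sha1_round(uint32_t st[5], uint32_t w, int t)
{
	static const uint32_t k[4] =
	    { 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6 };
	uint32_t a = st[0], b = st[1], c = st[2], d = st[3], e = st[4], f;

	if (t < 20)
		f = (b & c) | (~b & d);			/* Ch */
	else if (t >= 40 && t < 60)
		f = (b & c) | ((b | c) & d);		/* Maj */
	else
		f = b ^ c ^ d;				/* Parity */

	st[4] = d;
	st[3] = c;
	st[2] = rol32(b, 30);
	st[1] = a;
	st[0] = rol32(a, 5) + f + e + w + k[t / 20];
}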
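Likewise, the new x86_64-mont.S is a register-scheduled form of word-serial Montgomery multiplication: bn_mul_mont(rp, ap, bp, np, n0, num) computes rp = ap*bp*R^-1 mod np with R = 2^(64*num), where n0 = -np[0]^-1 mod 2^64 is passed by reference (hence the movq (%r8),%r8 in the prologue), and the result is written with the branch-free sbbq/andq/notq/orq select at .Lsub/.Lcopy. The following is a minimal C reference sketch of that loop, assuming unsigned __int128 is available; MAXN and the name mont_mul_ref are inventions of this sketch, not OpenSSL API.

#include <stdint.h>
#include <string.h>

/* Reference sketch (CIOS form) of what bn_mul_mont computes:
 *   rp = ap * bp * R^-1 mod np,  R = 2^(64*num),  n0 = -np[0]^-1 mod 2^64. */
#define MAXN 64					/* enough for 4096-bit moduli */

static void mont_mul_ref(uint64_t *rp, const uint64_t *ap, const uint64_t *bp,
    const uint64_t *np, uint64_t n0, int num)
{
	uint64_t t[MAXN + 2], r[MAXN];
	unsigned __int128 acc, borrow;
	int i, j;

	memset(t, 0, sizeof(t));
	for (i = 0; i < num; i++) {
		/* t += ap * bp[i]  (the mulq %rbx chain in .L1st/.Linner) */
		acc = 0;
		for (j = 0; j < num; j++) {
			acc += (unsigned __int128)ap[j] * bp[i] + t[j];
			t[j] = (uint64_t)acc;
			acc >>= 64;
		}
		acc += t[num];
		t[num] = (uint64_t)acc;
		t[num + 1] = (uint64_t)(acc >> 64);

		/* add m*np so the low word cancels, then drop one word
		 * (the mulq %rbp chain; m is %rbp after imulq %r8,%rax) */
		uint64_t m = t[0] * n0;
		acc = ((unsigned __int128)m * np[0] + t[0]) >> 64;
		for (j = 1; j < num; j++) {
			acc += (unsigned __int128)m * np[j] + t[j];
			t[j - 1] = (uint64_t)acc;
			acc >>= 64;
		}
		acc += t[num];
		t[num - 1] = (uint64_t)acc;
		t[num] = t[num + 1] + (uint64_t)(acc >> 64);
	}

	/* final conditional subtraction; the assembly does this without a
	 * branch, masking the source pointer with the borrow from sbbq */
	borrow = 0;
	for (j = 0; j < num; j++) {
		borrow = (unsigned __int128)t[j] - np[j] - (uint64_t)borrow;
		r[j] = (uint64_t)borrow;
		borrow = (borrow >> 64) & 1;
	}
	borrow = (unsigned __int128)t[num] - (uint64_t)borrow;
	for (j = 0; j < num; j++)
		rp[j] = ((borrow >> 64) & 1) ? t[j] : r[j];	/* borrow => t < np */
}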